/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
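
/* For instance, with the default definition above, a 16-byte word-aligned
   copy on a hypothetical 32-bit target costs four SImode moves, so
   MOVE_BY_PIECES_P (16, 32) holds exactly when MOVE_RATIO exceeds 4.
   The actual outcome depends on the target's MOVE_RATIO and on which
   integer modes have usable mov patterns.  */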
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
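
/* The tables filled in above are consulted by convert_move and
   convert_modes below: a MEM is referenced directly in a narrower mode
   only if direct_load/direct_store says some hard register can be
   loaded or stored in that mode.  */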
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
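
/* A sketch of the intended calling pattern, with hypothetical operands
   OP0 and OP1 that an expander is about to place into an insn:

	op0 = protect_from_queue (op0, 0);	(read access)
	op1 = protect_from_queue (op1, 1);	(OP1 will be modified)
	emit_insn (gen_move_insn (op1, op0));

   The protected values must be used immediately; if the queue is
   flushed in between, incorrect code results, as warned above.  */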
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Retrieve a mark on the queue.  */

static rtx
mark_queue (void)
{
  return pending_chain;
}
/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

static void
emit_insns_enqueued_after_mark (rtx mark)
{
  rtx p;

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  /* QUEUED_BODY should never be a SEQUENCE.  */
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      QUEUED_BODY (p) = 0;
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
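
/* Hypothetical lifecycle of the queue when expanding V++: enqueue_insn
   records the increment and hands back a QUEUED rtx standing for the
   pre-increment value,

	rtx q = enqueue_insn (v, gen_add2_insn (v, const1_rtx));

   uses of Q then go through protect_from_queue, and a later call to
   emit_queue emits the deferred add.  */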
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
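
/* As a concrete sketch: with a HImode pseudo P and an SImode pseudo Q,

	convert_move (q, p, 0);

   emits a sign-extension (an extendhisi2 pattern, if the target has
   one), while passing unsignedp == 1 would zero-extend instead.  */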
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode,
	       rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
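
/* A narrowing request on a constant, e.g. convert_modes (QImode, SImode,
   GEN_INT (300), 1), is folded here without emitting any insns, whereas
   widening a pseudo falls through to the gen_reg_rtx/convert_move tail
   above.  (Illustrative operands only.)  */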
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
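
/* A caller such as a memcpy expander can thus ask, hypothetically,

	if (can_move_by_pieces (INTVAL (size), align))
	  ... expand the copy inline ...

   before falling back to a library call.  */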
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (!MEM_P (x))
    abort ();
  if (!MEM_P (y))
    abort ();
  if (size == 0)
    abort ();

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
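
/* A typical call from an expander, with hypothetical BLKmode MEMs DST
   and SRC:

	emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   As the dispatch above shows, this tries by-pieces moves, then a
   movstr pattern, then a memcpy libcall, then an explicit loop.  */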
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
#endif
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
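
/* The generated control flow is, schematically:

	iter = 0;
	goto cmp;
     top:
	x[iter] = y[iter];	(one QImode move)
	iter = iter + 1;
     cmp:
	if (iter < size) goto top;
*/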
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
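
/* The DST PARALLEL has the shape (hypothetical two-register example):

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   pairing each destination register with the byte offset of the piece
   it receives.  */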
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else if (bytepos == 0 && XVECLEN (src, 0))
            {
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]),
                                              bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
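
/* Illustrative sketch, not part of this file: the kind of PARALLEL that
   emit_group_store consumes.  Here a 16-byte block is described as two
   DImode hard registers at byte offsets 0 and 8; the register numbers
   are hypothetical.  */
#if 0
static rtx
example_group_rtx (void)
{
  rtvec v = rtvec_alloc (2);

  RTVEC_ELT (v, 0) = gen_rtx_EXPR_LIST (VOIDmode,
                                        gen_rtx_REG (DImode, 3), const0_rtx);
  RTVEC_ELT (v, 1) = gen_rtx_EXPR_LIST (VOIDmode,
                                        gen_rtx_REG (DImode, 4), GEN_INT (8));
  return gen_rtx_PARALLEL (BLKmode, v);
}
#endif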
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          BITS_PER_WORD),
                       BITS_PER_WORD);
    }

  return tgtblk;
}
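
/* Worked example (illustrative): on a 32-bit big-endian target
   returning a 6-byte structure at the least significant end of its
   registers, bytes % UNITS_PER_WORD == 2, so padding_correction
   = 32 - 2 * 8 = 16; extraction starts at xbitpos == 16 in the first
   source word while the store starts at bitpos == 0 in TGTBLK.  */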
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (!REG_P (reg)
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
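
/* Illustrative usage sketch (hypothetical register numbers): recording
   the registers an argument occupies before emitting a call.  Note that
   use_reg prepends, so the resulting EXPR_LIST holds the USEs in
   reverse order of the calls.  */
#if 0
  rtx call_fusage = NULL_RTX;

  use_regs (&call_fusage, 3, 2);
  /* call_fusage is now suitable for CALL_INSN_FUNCTION_USAGE and
     contains a USE of reg 4 followed by a USE of reg 3.  */
#endif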
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT max_size, l;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = mov_optab->handlers[(int) mode].insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      if (l != 0)
        abort ();
    }

  return 1;
}
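
/* Illustrative sketch of a CONSTFUN callback: feed successive bytes of
   a host string through c_readstr, which builds a CONST_INT or
   CONST_DOUBLE holding the next GET_MODE_SIZE (MODE) bytes.  The
   function name here is hypothetical; the string builtins use callbacks
   of this shape.  */
#if 0
static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  const char *str = data;

  return c_readstr (str + offset, mode);
}
#endif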
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
   mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      if (endp == 2)
        abort ();
      return to;
    }

  if (! STORE_BY_PIECES_P (len, align))
    abort ();
  to = protect_from_queue (to, 1);
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
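
/* Illustrative usage, assuming EXAMPLE_READ_STR from the sketch above:
   probe with can_store_by_pieces first, since store_by_pieces aborts
   when STORE_BY_PIECES_P is false.  TO is a BLKmode MEM that has been
   passed through protect_from_queue.  */
#if 0
  if (can_store_by_pieces (len, example_read_str, (void *) str, align))
    store_by_pieces (to, len, example_read_str, (void *) str, align, 0);
#endif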
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
   before calling.  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if (!data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
}
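
/* Worked example (illustrative): with data->len == 13, a 32-bit
   word_mode and full alignment, the loop first selects SImode and
   store_by_pieces_2 emits three 4-byte stores (12 bytes); HImode then
   matches nothing since only one byte remains, and QImode finishes
   with a single 1-byte store, leaving data->len == 0.  */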
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage (rtx object, rtx size)
{
  rtx retval = 0;
  unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
                        : GET_MODE_ALIGNMENT (GET_MODE (object)));

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (GET_MODE (object) != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
  else
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (size == const0_rtx)
        ;
      else if (GET_CODE (size) == CONST_INT
               && CLEAR_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);
      else if (clear_storage_via_clrstr (object, size, align))
        ;
      else
        retval = clear_storage_via_libcall (object, size);
    }

  return retval;
}
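
/* Illustrative usage: zeroing a 32-byte BLKmode stack temporary.  A
   constant size this small is normally handled by clear_by_pieces;
   larger or variable sizes fall through to a clrstr pattern or the
   memset libcall.  */
#if 0
  rtx mem = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (mem, GEN_INT (32));
#endif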
/* A subroutine of clear_storage.  Expand a clrstr pattern;
   return true if successful.  */

static bool
clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = clrstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op1;
          rtx last = get_last_insn ();
          rtx pat;

          op1 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (op1, mode))
            op1 = copy_to_mode_reg (mode, op1);

          pat = GEN_FCN ((int) code) (object, op1, opalign);
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

static rtx
clear_storage_via_libcall (rtx object, rtx size)
{
  tree call_expr, arg_list, fn, object_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* OBJECT or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy OBJECT
     and SIZE into new pseudos.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  size_tree = make_tree (sizetype, size);

  fn = clear_storage_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (object))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));

  return retval;
}
/* A subroutine of clear_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn, NULL);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = validize_mem (x);

  if (MEM_P (y)
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);

  if (mode == BLKmode)
    abort ();

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
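
/* Illustrative usage: copying a constant into a fresh pseudo.  If the
   constant is not LEGITIMATE_CONSTANT_P, emit_move_insn spills it to
   the constant pool itself, and it attaches a REG_EQUAL note when the
   emitted SET source differs from the original constant.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
#endif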
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = GET_MODE_INNER (mode))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

#ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
         machine can push exactly, we need to use move instructions.  */
      if (stack
          && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
              != GET_MODE_SIZE (submode)))
        {
          rtx temp;
          HOST_WIDE_INT offset1, offset2;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

#ifdef STACK_GROWS_DOWNWARD
          offset1 = 0;
          offset2 = GET_MODE_SIZE (submode);
#else
          offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
          offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
                     + GET_MODE_SIZE (submode));
#endif

          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset1))),
                          gen_realpart (submode, y));
          emit_move_insn (change_address (x, submode,
                                          gen_rtx_PLUS (Pmode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (offset2))),
                          gen_imagpart (submode, y));
        }
      else
#endif
      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
#else
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_realpart (submode, y));
          emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                          gen_imagpart (submode, y));
#endif
        }
      else
        {
          rtx realpart_x, realpart_y;
          rtx imagpart_x, imagpart_y;

          /* If this is a complex value with each part being smaller than a
             word, the usual calling sequence will likely pack the pieces into
             a single register.  Unfortunately, SUBREG of hard registers only
             deals in terms of words, so we have a problem converting input
             arguments to the CONCAT of two registers that is used elsewhere
             for complex values.  If this is before reload, we can copy it into
             memory and reload.  FIXME, we should see about using extract and
             insert on integer registers, but complex short and complex char
             variables should be rarely used.  */
          if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
              && (reload_in_progress | reload_completed) == 0)
            {
              int packed_dest_p
                = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
              int packed_src_p
                = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

              if (packed_dest_p || packed_src_p)
                {
                  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                               ? MODE_FLOAT : MODE_INT);

                  enum machine_mode reg_mode
                    = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

                  if (reg_mode != BLKmode)
                    {
                      rtx mem = assign_stack_temp (reg_mode,
                                                   GET_MODE_SIZE (mode), 0);
                      rtx cmem = adjust_address (mem, mode, 0);

                      if (packed_dest_p)
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

                          emit_move_insn_1 (cmem, y);
                          return emit_move_insn_1 (sreg, mem);
                        }
                      else
                        {
                          rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

                          emit_move_insn_1 (mem, sreg);
                          return emit_move_insn_1 (x, cmem);
                        }
                    }
                }
            }

          realpart_x = gen_realpart (submode, x);
          realpart_y = gen_realpart (submode, y);
          imagpart_x = gen_imagpart (submode, x);
          imagpart_y = gen_imagpart (submode, y);

          /* Show the output dies here.  This is necessary for SUBREGs
             of pseudos since we cannot track their lifetimes correctly;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed)
              && (GET_CODE (realpart_x) == SUBREG
                  || GET_CODE (imagpart_x) == SUBREG))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

          emit_move_insn (realpart_x, realpart_y);
          emit_move_insn (imagpart_x, imagpart_y);
        }

      return get_last_insn ();
    }

  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
           && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
          && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
        tmode = CCmode;
      else
        for (tmode = QImode; tmode != VOIDmode;
             tmode = GET_MODE_WIDER_MODE (tmode))
          if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
            break;

      if (tmode == VOIDmode)
        abort ();

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
         may call change_address which is not appropriate if we were
         called when a reload was in progress.  We don't have to worry
         about changing the address since the size in bytes is supposed to
         be the same.  Copy the MEM to change the mode and move any
         substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
        {
          x = gen_lowpart_common (tmode, x1);
          if (x == 0 && MEM_P (x1))
            {
              x = adjust_address_nv (x1, tmode, 0);
              copy_replacements (x1, x);
            }

          y = gen_lowpart_common (tmode, y1);
          if (y == 0 && MEM_P (y1))
            {
              y = adjust_address_nv (y1, tmode, 0);
              copy_replacements (y1, y);
            }
        }
      else
        {
          x = gen_lowpart (tmode, x);
          y = gen_lowpart (tmode, y);
        }

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
           && (submode = int_mode_for_mode (mode)) != BLKmode
           && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
                      (simplify_gen_subreg (submode, x, mode, 0),
                       simplify_gen_subreg (submode, y, mode, 0)));

  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          rtx temp;
          enum rtx_code code;

          /* Do not use anti_adjust_stack, since we don't want to update
             stack_pointer_delta.  */
          temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                               sub_optab,
#else
                               add_optab,
#endif
                               stack_pointer_rtx,
                               GEN_INT
                                 (PUSH_ROUNDING
                                  (GET_MODE_SIZE (GET_MODE (x)))),
                               stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

          if (temp != stack_pointer_rtx)
            emit_move_insn (stack_pointer_rtx, temp);

          code = GET_CODE (XEXP (x, 0));

          /* Just hope that small offsets off SP are OK.  */
          if (code == POST_INC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (-((HOST_WIDE_INT)
                                            GET_MODE_SIZE (GET_MODE (x)))));
          else if (code == POST_DEC)
            temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          else
            temp = stack_pointer_rtx;

          x = change_address (x, VOIDmode, temp);
        }
#endif

      /* If we are in reload, see if either operand is a MEM whose address
         is scheduled for replacement.  */
      if (reload_in_progress && MEM_P (x)
          && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
        x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && MEM_P (y)
          && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
        y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          need_clobber |= (GET_CODE (xpart) == SUBREG);

          last_insn = emit_move_insn (xpart, ypart);
        }

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
         of pseudos since we cannot track their lifetimes correctly;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed)
          && need_clobber != 0)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
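
/* Worked example (illustrative): moving a DImode value on a 32-bit
   target that defines no movdi pattern reaches the multi-word case
   above, which emits (8 + 3) / 4 == 2 word_mode moves via
   operand_subword, preceded by a CLOBBER when a SUBREG destination
   prevents lifetime tracking.  */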
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
        continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
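
/* Worked example (illustrative): for a DFmode load of 1.0, the value
   truncates exactly to SFmode, so on a target whose extendsfdf2
   pattern accepts a constant or memory source the move is emitted as
   a single SF-to-DF extension, halving the constant pool entry.  */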
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
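
/* Worked example (illustrative): pushing an HImode argument when
   PUSH_ROUNDING rounds 2 bytes up to 4 and the argument pads downward.
   padding_size = 4 - 2 = 2, so after the explicit stack adjustment the
   value is stored at sp + 2, plus rounded_size when STACK_PUSH_CODE is
   POST_DEC because the stack pointer has already moved past the slot.  */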
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset;
      int skip;

      if (reg && GET_CODE (reg) == PARALLEL)
        {
          /* Use the size of the elt to compute offset.  */
          rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
          used = partial * GET_MODE_SIZE (GET_MODE (elt));
          offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
        }
      else
        offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          if (type != 0)
            {
              set_mem_attributes (target, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (target, 0);
            }

          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);
          if (type != 0)
            {
              set_mem_attributes (dest, type, 1);
              /* Function incoming arguments may overlap with sibling call
                 outgoing arguments and we cannot allow reordering of reads
                 from function arguments with stores to outgoing arguments
                 of sibling calls.  */
              set_mem_alias_set (dest, 0);
            }

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return ((x == 0
           /* Only registers can be subtargets.  */
           || !REG_P (x)
           /* If the register is readonly, it can't be set more than once.  */
           || RTX_UNCHANGING_P (x)
           /* Don't use hard regs to avoid extending their life.  */
           || REGNO (x) < FIRST_PSEUDO_REGISTER
           /* Avoid subtargets inside loops,
              since they hide some invariant expressions.  */
           || preserve_subexpressions_p ())
          ? 0 : x);
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.  */

rtx
expand_assignment (tree to, tree from, int want_value)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

          if (!MEM_P (to_rtx))
            abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (MEM_P (to_rtx)
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      if (MEM_P (to_rtx))
        {
          /* If the field is at offset zero, we could have been given the
             DECL_RTX of the parent struct.  Don't munge it.  */
          to_rtx = shallow_copy_rtx (to_rtx);

          set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
        }

      /* Deal with volatile and readonly fields.  The former is only done
         for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && MEM_P (to_rtx))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_VOLATILE_P (to_rtx) = 1;
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1))
          /* We can't assert that a MEM won't be set more than once
             if the component is not addressable because another
             non-addressable component may be referenced by the same MEM.  */
          && ! (MEM_P (to_rtx) && ! can_address_p (to)))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      if (MEM_P (to_rtx) && ! can_address_p (to))
        {
          if (to_rtx == orig_to_rtx)
            to_rtx = copy_rtx (to_rtx);
          MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
        }

      /* Optimize bitfld op= val in certain cases.  */
      while (mode1 == VOIDmode && !want_value
             && bitpos + bitsize <= BITS_PER_WORD
             && bitsize < BITS_PER_WORD
             && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
             && !TREE_SIDE_EFFECTS (to)
             && !TREE_THIS_VOLATILE (to))
        {
          optab binop;
          rtx value;
          tree src = from, op0, op1;
          HOST_WIDE_INT count = bitpos;

          if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
              || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
            break;

          op0 = TREE_OPERAND (src, 0);
          op1 = TREE_OPERAND (src, 1);
          STRIP_NOPS (op0);

          if (! operand_equal_p (to, op0, 0))
            break;

          if (BYTES_BIG_ENDIAN)
            count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;

          /* Special case some bitfield op= exp.  */
          switch (TREE_CODE (src))
            {
            case PLUS_EXPR:
            case MINUS_EXPR:
              /* For now, just optimize the case of the topmost bitfield
                 where we don't need to do any masking and also
                 1 bit bitfields where xor can be used.
                 We might win by one instruction for the other bitfields
                 too if insv/extv instructions aren't used, so that
                 can be added later.  */
              if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
                  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
                break;

              value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
              value = protect_from_queue (value, 0);
              to_rtx = protect_from_queue (to_rtx, 1);
              binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
              if (bitsize == 1
                  && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
                {
                  value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
                                      NULL_RTX);
                  binop = xor_optab;
                }
              value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
                                    value, build_int_2 (count, 0),
                                    NULL_RTX, 1);
              result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
                                     value, to_rtx, 1, OPTAB_WIDEN);
              if (result != to_rtx)
                emit_move_insn (to_rtx, result);
              free_temp_slots ();
              pop_temp_slots ();
              return NULL_RTX;

            default:
              break;
            }

          break;
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast for HPUX compiler.  */
                             ? ((enum machine_mode)
                                TYPE_MODE (TREE_TYPE (to)))
                             : VOIDmode),
                            unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TYPE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
            && REG_P (DECL_RTL (to))))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address (GET_MODE (to_rtx), value);
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TYPE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
4003 /* Generate code for computing expression EXP,
4004 and storing the value into TARGET.
4005 TARGET may contain a QUEUED rtx.
4007 If WANT_VALUE & 1 is nonzero, return a copy of the value
4008 not in TARGET, so that we can be sure to use the proper
4009 value in a containing expression even if TARGET has something
4010 else stored in it. If possible, we copy the value through a pseudo
4011 and return that pseudo. Or, if the value is constant, we try to
4012 return the constant. In some cases, we return a pseudo
4013 copied *from* TARGET.
4015 If the mode is BLKmode then we may return TARGET itself.
4016 It turns out that in BLKmode it doesn't cause a problem.
4017 because C has no operators that could combine two different
4018 assignments into the same BLKmode object with different values
4019 with no sequence point. Will other languages need this to
4022 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4023 to catch quickly any cases where the caller uses the value
4024 and fails to set WANT_VALUE.
4026 If WANT_VALUE & 2 is set, this is a store into a call param on the
4027 stack, and block moves may need to be treated specially. */
4030 store_expr (tree exp
, rtx target
, int want_value
)
4033 rtx alt_rtl
= NULL_RTX
;
4034 rtx mark
= mark_queue ();
4035 int dont_return_target
= 0;
4036 int dont_store_target
= 0;
4038 if (VOID_TYPE_P (TREE_TYPE (exp
)))
4040 /* C++ can generate ?: expressions with a throw expression in one
4041 branch and an rvalue in the other. Here, we resolve attempts to
4042 store the throw expression's nonexistent result. */
4045 expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
4048 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
4050 /* Perform first part of compound expression, then assign from second
4052 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
4053 want_value
& 2 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
4055 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target),
			      (want_value & 2
			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
			    (want_value & 2
			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
	dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
	   && MEM_P (target)
	   && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	{
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
		  && ! side_effects_p (temp) && ! side_effects_p (target)))
	    dont_store_target = 1;
	  temp = copy_to_reg (temp);
	}
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if ((want_value & 1) == 0
	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (lang_hooks.types.signed_or_unsigned_type
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert (lang_hooks.types.type_for_mode
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
	 now so it gets done only once.  Strictly speaking, this is
	 only necessary if the MEM is volatile, or if the address
	 overlaps TARGET.  But not performing the load twice also
	 reduces the amount of rtl we generate and then have to CSE.  */
      if (MEM_P (temp) && (want_value & 1) != 0)
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
	 target.  Otherwise, the caller might get confused by a result whose
	 mode is larger than expected.  */

      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
	{
	  if (GET_MODE (temp) != VOIDmode)
	    {
	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
		SUBREG_PROMOTED_UNSIGNED_P (target));
	    }
	  else
	    temp = convert_modes (GET_MODE (target),
				  GET_MODE (SUBREG_REG (target)),
				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr_real (exp, target, GET_MODE (target),
			       (want_value & 2
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && REG_P (target)
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(MEM_P (target) && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
	dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
	 unless necessary, because some front ends (C++) have an expr_size
	 hook that aborts on objects that are not supposed to be bit-copied
	 or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      emit_insns_enqueued_after_mark (mark);
      target = protect_from_queue (target, 1);
      temp = protect_from_queue (temp, 0);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (want_value & 2
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (want_value & 2
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (want_value & 2
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, TREE_TYPE (exp),
			 int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (want_value & 2
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }
  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && !MEM_P (temp))
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
	   && GET_MODE (target) != BLKmode
	   && ! (REG_P (target)
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Examine CTOR.  Discover how many scalar fields are set to nonzero
   values and place it in *P_NZ_ELTS.  Discover how many scalar fields
   are set to non-constant values and place it in *P_NC_ELTS.  */

static void
categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_nc_elts)
{
  HOST_WIDE_INT nz_elts, nc_elts;
  tree list;

  nz_elts = 0;
  nc_elts = 0;

  for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
    {
      tree value = TREE_VALUE (list);
      tree purpose = TREE_PURPOSE (list);
      HOST_WIDE_INT mult;

      mult = 1;
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, nc = 0;
	    categorize_ctor_elements_1 (value, &nz, &nc);
	    nz_elts += mult * nz;
	    nc_elts += mult * nc;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  break;

	case VECTOR_CST:
	  {
	    tree v;
	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
	      if (!initializer_zerop (TREE_VALUE (v)))
		nz_elts += mult;
	  }
	  break;

	default:
	  nz_elts += mult;
	  if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
	    nc_elts += mult;
	  break;
	}
    }

  *p_nz_elts += nz_elts;
  *p_nc_elts += nc_elts;
}

void
categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_nc_elts)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
}
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  */

HOST_WIDE_INT
count_type_elements (tree type)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree telts = array_type_nelts (type);
	if (telts && host_integerp (telts, 1))
	  {
	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));

	    if (n == 0 || m == 0)
	      return 0;
	    else if (max / n > m)
	      return n * m;
	  }
	return -1;
      }

    case RECORD_TYPE:
      {
	HOST_WIDE_INT n = 0, t;
	tree f;

	for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      t = count_type_elements (TREE_TYPE (f));
	      if (t < 0)
		return -1;
	      n += t;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
	   right to count the fields.  Guess based on the number of words.  */
	HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
	return n / UNITS_PER_WORD;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      /* ??? This is broken.  We should encode the vector width in the tree.  */
      return GET_MODE_NUNITS (TYPE_MODE (type));

    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      abort ();
    }
}
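/* Example (illustrative): for struct { int a; short b[4]; } the ARRAY_TYPE
   case computes 4 elements for B (array_type_nelts yields 3, plus one) and
   the RECORD_TYPE case sums 1 + 4, so count_type_elements returns 5; a
   variable-length array instead returns -1.  */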
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, nc_elts, elts;

      /* If there are no ranges of true bits, it is all zero.  */
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;

      categorize_ctor_elements (exp, &nz_elts, &nc_elts);
      elts = count_type_elements (TREE_TYPE (exp));

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, tree type, int cleared, int alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		 alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* If size is zero or the target is already cleared, do nothing.  */
      if (size == 0 || cleared)
	cleared = 1;
      /* We either clear the aggregate or indicate the value is dead.  */
      else if ((TREE_CODE (type) == UNION_TYPE
		|| TREE_CODE (type) == QUAL_UNION_TYPE)
	       && ! CONSTRUCTOR_ELTS (exp))
	/* If the constructor is empty, clear the union.  */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (REG_P (target) && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (!REG_P (target)
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  rtx xtarget = target;

	  if (readonly_fields_p (type))
	    {
	      xtarget = copy_rtx (xtarget);
	      RTX_UNCHANGING_P (xtarget) = 1;
	    }

	  clear_storage (xtarget, GEN_INT (size));
	  cleared = 1;
	}

      if (! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  tree offset;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && initializer_zerop (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  offset = DECL_FIELD_OFFSET (field);
	  if (host_integerp (offset, 0)
	      && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	  if (offset)
	    {
	      rtx offset_rtx;

	      offset
		= SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						  make_tree (TREE_TYPE (exp),
							     target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (!MEM_P (to_rtx))
		abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	      to_rtx = offset_address (to_rtx, offset_rtx,
				       highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (MEM_P (to_rtx))
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (REG_P (target)
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = lang_hooks.types.type_for_size
		    (BITS_PER_WORD, TYPE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif

	  if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
	      && DECL_NONADDRESSABLE_P (field))
	    {
	      to_rtx = copy_rtx (to_rtx);
	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
				   value, type, cleared,
				   get_alias_set (TREE_TYPE (field)));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE
	   || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain;
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;
      int icode = 0;
      rtx *vector = NULL;
      unsigned n_elts = 0;

      if (TREE_CODE (type) == ARRAY_TYPE)
	domain = TYPE_DOMAIN (type);
      else
	/* Vectors do not have domains; look up the domain of
	   the array embedded in the debug representation type.
	   FIXME Would probably be more efficient to treat vectors
	   separately from arrays.  */
	{
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
	  if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	    {
	      enum machine_mode mode = GET_MODE (target);

	      icode = (int) vec_init_optab->handlers[mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		{
		  unsigned int i, elt_size;

		  elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
		  n_elts = (GET_MODE_SIZE (mode) / elt_size);
		  vector = alloca (n_elts * sizeof (rtx));
		  for (i = 0; i < n_elts; i++)
		    vector[i] = CONST0_RTX (GET_MODE_INNER (mode));
		}
	    }
	}

      const_bounds_p = (TYPE_MIN_VALUE (domain)
			&& TYPE_MAX_VALUE (domain)
			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
			&& host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (REG_P (target) && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;

	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);

		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}

      if (need_to_clear && size > 0 && !vector)
	{
	  if (! cleared)
	    {
	      if (REG_P (target))
		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	      else
		clear_storage (target, GEN_INT (size));
	    }
	  cleared = 1;
	}
      else if (REG_P (target))
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && initializer_zerop (value))
	    continue;

	  unsignedp = TYPE_UNSIGNED (elttype);
	  mode = TYPE_MODE (elttype);
	  if (mode == BLKmode)
	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
	    bitsize = GET_MODE_BITSIZE (mode);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      if (vector)
		abort ();

	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (!MEM_P (target)
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

		      if (MEM_P (target)
			  && !MEM_KEEP_ALIAS_SET_P (target)
			  && TREE_CODE (type) == ARRAY_TYPE
			  && TYPE_NONALIASED_COMPONENT (type))
			{
			  target = copy_rtx (target);
			  MEM_KEEP_ALIAS_SET_P (target) = 1;
			}

		      store_constructor_field
			(target, bitsize, bitpos, mode, value, type, cleared,
			 get_alias_set (elttype));
		    }
		}
	      else
		{
		  rtx loop_start = gen_label_rtx ();
		  rtx loop_end = gen_label_rtx ();
		  tree exit_cond;

		  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  unsignedp = TYPE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
		  store_expr (lo_index, index_r, 0);

		  /* Build the head of the loop.  */
		  do_pending_stack_adjust ();
		  emit_queue ();
		  emit_label (loop_start);

		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  xtarget = offset_address (target, pos_rtx,
					    highest_pow2_factor (position));
		  xtarget = adjust_address (xtarget, mode, 0);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared,
				       bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  /* Generate a conditional jump to exit the loop.  */
		  exit_cond = build (LT_EXPR, integer_type_node,
				     index, hi_index);
		  jumpif (exit_cond, loop_end);

		  /* Update the loop counter, and jump to the head of
		     the loop.  */
		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  emit_jump (loop_start);

		  /* Build the end of the loop.  */
		  emit_label (loop_end);
		}
	    }
	  else if ((index != 0 && ! host_integerp (index, 0))
		   || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      tree position;

	      if (vector)
		abort ();

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
				 fold (build (MINUS_EXPR, index,
					      TYPE_MIN_VALUE (domain))));

	      position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
	      xtarget = offset_address (target,
					expand_expr (position, 0, VOIDmode, 0),
					highest_pow2_factor (position));
	      xtarget = adjust_address (xtarget, mode, 0);
	      store_expr (value, xtarget, 0);
	    }
	  else if (vector)
	    {
	      int pos;

	      if (index != 0)
		pos = tree_low_cst (index, 0) - minelt;
	      else
		pos = i;
	      vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((tree_low_cst (index, 0) - minelt)
			  * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

	      if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		  && TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
		{
		  target = copy_rtx (target);
		  MEM_KEEP_ALIAS_SET_P (target) = 1;
		}
	      store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, cleared, get_alias_set (elttype));
	    }
	}
      if (vector)
	{
	  emit_insn (GEN_FCN (icode) (target,
				      gen_rtx_PARALLEL (GET_MODE (target),
							gen_rtvec_v (n_elts,
								     vector))));
	}
    }
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using memset) and then set the
	 bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
	{
	  if (!cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  unsigned int bit_pos = 0;
	  unsigned int ibit = 0;
	  unsigned int offset = 0;  /* In bytes from beginning of set.  */

	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = gen_int_mode (word, mode);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (MEM_P (target))
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	/* Don't bother clearing storage if the set is all ones.  */
	if (TREE_CHAIN (elt) != NULL_TREE
	    || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: ( ! host_integerp (TREE_VALUE (elt), 0)
		   || ! host_integerp (TREE_PURPOSE (elt), 0)
		   || (tree_low_cst (TREE_VALUE (elt), 0)
		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
		       != (HOST_WIDE_INT) nbits))))
	  clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type (lang_hooks.types.type_for_mode
					  (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (MEM_P (target))
	    targetx = target;
	  else
	    abort ();

	  /* Optimization:  If startbit and endbit are constants divisible
	     by BITS_PER_UNIT, call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
	    emit_library_call (setbits_libfunc, LCT_NORMAL,
			       VOIDmode, 4, XEXP (targetx, 0),
			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     enum machine_mode mode, tree exp, enum machine_mode value_mode,
	     int unsignedp, tree type, int alias_set)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, value_mode != VOIDmode);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (!MEM_P (target) || !MEM_P (temp)
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
		       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;

	      if (unsignedp)
		return expand_and (tmode, temp,
				   gen_int_mode (width_mask, tmode),
				   NULL_RTX);

	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && !REG_P (addr)
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (exp, 1);
	  tree this_offset = component_ref_field_offset (exp);

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (this_offset == 0)
	    break;

	  offset = size_binop (PLUS_EXPR, offset, this_offset);
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   DECL_FIELD_BIT_OFFSET (field));

	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
	  tree index = TREE_OPERAND (exp, 1);
	  tree low_bound = array_ref_low_bound (exp);
	  tree unit_size = array_ref_element_size (exp);

	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
	 conversions that don't change the mode, and all view conversions
	 except those that need to "step up" the alignment.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
			    > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
			   && STRICT_ALIGNMENT
			   && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       < BIGGEST_ALIGNMENT)
			   && (TYPE_ALIGN_OK (TREE_TYPE (exp))
			       || TYPE_ALIGN_OK (TREE_TYPE
						 (TREE_OPERAND (exp, 0))))))
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    return size_binop (MULT_EXPR, aligned_size,
		       size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    return size_binop (MULT_EXPR, aligned_offset,
		       size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    /* ??? Sure they are handled, but get_inner_reference may return
       a different PBITSIZE, depending upon whether the expression is
       wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value = simplify_gen_subreg (GET_MODE (value),
				   force_reg (GET_MODE (SUBREG_REG (value)),
					      force_operand (SUBREG_REG (value),
							     NULL_RTX)),
				   GET_MODE (SUBREG_REG (value)),
				   SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }

  if (UNARY_P (value))
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to
     be explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
5906 /* Subroutine of expand_expr: return nonzero iff there is no way that
5907 EXP can reference X, which is being modified. TOP_P is nonzero if this
5908 call is going to be used to determine whether we need a temporary
5909 for EXP, as opposed to a recursive call to this function.
5911 It is always safe for this routine to return zero since it merely
5912 searches for optimization opportunities. */
5915 safe_from_p (rtx x
, tree exp
, int top_p
)
5921 /* If EXP has varying size, we MUST use a target since we currently
5922 have no way of allocating temporaries of variable size
5923 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5924 So we assume here that something at a higher level has prevented a
5925 clash. This is somewhat bogus, but the best we can do. Only
5926 do this when X is BLKmode and when we are at the top level. */
5927 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5928 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5929 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5930 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5931 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5933 && GET_MODE (x
) == BLKmode
)
5934 /* If X is in the outgoing argument area, it is always safe. */
5936 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5937 || (GET_CODE (XEXP (x
, 0)) == PLUS
5938 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5941 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5942 find the underlying pseudo. */
5943 if (GET_CODE (x
) == SUBREG
)
5946 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5950 /* Now look at our tree code and possibly recurse. */
5951 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5954 exp_rtl
= DECL_RTL_IF_SET (exp
);
5961 if (TREE_CODE (exp
) == TREE_LIST
)
5965 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
5967 exp
= TREE_CHAIN (exp
);
5970 if (TREE_CODE (exp
) != TREE_LIST
)
5971 return safe_from_p (x
, exp
, 0);
5974 else if (TREE_CODE (exp
) == ERROR_MARK
)
5975 return 1; /* An already-visited SAVE_EXPR? */
5980 /* The only case we look at here is the DECL_INITIAL inside a
5982 return (TREE_CODE (exp
) != DECL_EXPR
5983 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
5984 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
5985 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
5989 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
5994 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          /* If the operand is static or we are static, we can't conflict.
             Likewise if we don't conflict with the operand at all.  */
          if (staticp (TREE_OPERAND (exp, 0))
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
            return 1;

          /* Otherwise, the only way this can conflict is if we are taking
             the address of a DECL whose address is part of X, which is
             very rare.  */
          exp = TREE_OPERAND (exp, 0);
          if (DECL_P (exp))
            {
              if (!DECL_RTL_SET_P (exp)
                  || !MEM_P (DECL_RTL (exp)))
                return 0;
              else
                exp_rtl = XEXP (DECL_RTL (exp), 0);
            }
          break;

        case INDIRECT_REF:
          if (MEM_P (x)
              && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                        get_alias_set (exp)))
            return 0;
          break;

        case CALL_EXPR:
          /* Assume that the call will clobber all hard registers and
             all of memory.  */
          if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
              || MEM_P (x))
            return 0;
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
        case SAVE_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

        default:
          break;
        }
      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;

      /* If this is a language-specific tree code, it may require
         special handling.  */
      if ((unsigned int) TREE_CODE (exp)
          >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
          && !lang_hooks.safe_from_p (x, exp))
        return 0;
    }
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (REG_P (exp_rtl)
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (MEM_P (x) && MEM_P (exp_rtl)
                    && true_dependence (exp_rtl, VOIDmode, x,
                                        rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
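/* Illustrative sketch (not part of the original source): a typical use
   of safe_from_p by a caller that wants to reuse TARGET while expanding
   another operand.  TARGET and EXP1 are hypothetical placeholders; since
   safe_from_p may conservatively return zero, the fallback path must
   always be correct.  */
#if 0
  if (! safe_from_p (target, exp1, 1))
    target = 0;		/* Fall back to a fresh temporary.  */
#endif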
/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (tree exp)
{
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor (tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
         HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
         We need to handle this case since we can find it in a COND_EXPR,
         a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
         erroneous program, so return BIGGEST_ALIGNMENT to avoid any
         later ICE.  */
      if (TREE_CONSTANT_OVERFLOW (exp))
        return BIGGEST_ALIGNMENT;
      else
        {
          /* Note: tree_low_cst is intentionally not used here,
             we don't care about the upper bits.  */
          c0 = TREE_INT_CST_LOW (exp);
          c0 &= -c0;
          return c0 ? c0 : BIGGEST_ALIGNMENT;
        }
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
          && host_integerp (TREE_OPERAND (exp, 1), 1))
        {
          c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
          c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
          return MAX (1, c0 / c1);
        }
      break;

    case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
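/* Worked example (illustrative, not from the original source): for the
   expression (i * 12) + 8, the MULT_EXPR case yields 1 * 4 = 4 for
   i * 12 (4 being the largest power of two dividing 12), the
   INTEGER_CST case yields 8 for the addend, and the PLUS_EXPR case
   returns MIN (4, 8) = 4, so a MEM addressed by this expression may be
   marked as 4-byte aligned.  */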
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
  else
    target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
  return MAX (factor, target_align);
}
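/* Worked example (illustrative, not from the original source): storing
   the (i * 12) + 8 expression above into a COMPONENT_REF whose
   FIELD_DECL has DECL_ALIGN of 64 bits gives
   MAX (4, 64 / BITS_PER_UNIT) = 8 on a target with 8-bit units, so the
   target's own layout can raise the factor computed from the address
   expression alone.  */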
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
        {
          /* Prepare a mem & address for the decl.  */
          rtx x;

          if (TREE_STATIC (var))
            abort ();

          x = gen_rtx_MEM (DECL_MODE (var),
                           gen_reg_rtx (Pmode));

          set_mem_attributes (x, var, 1);
          SET_DECL_RTL (var, x);
        }
      else if (lang_hooks.expand_decl (var))
        ;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
        expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
        rest_of_decl_compilation (var, NULL, 0, 0);
      else if (TREE_CODE (var) == TYPE_DECL
               || TREE_CODE (var) == CONST_DECL
               || TREE_CODE (var) == FUNCTION_DECL
               || TREE_CODE (var) == LABEL_DECL)
        /* No expansion needed.  */;
      else
        abort ();
    }
}
/* Expands declarations of variables in list VARS.  */

static void
expand_vars (tree vars)
{
  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      if (DECL_EXTERNAL (var))
        continue;

      expand_var (var);
      expand_decl_init (var);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
                 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
         temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
        exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
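/* Example (illustrative, not from the original source): when expanding
   the operands of "a + (a = b)" with flag_evaluation_order set, EXP1
   has side effects, so EXP0 is wrapped in a SAVE_EXPR; the old value of
   "a" is therefore captured into a temporary before the assignment in
   EXP1 can clobber it.  */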
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
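/* Example (illustrative, not from the original source): under
   EXPAND_SUM, expanding &a[i] for a global array "a" of 4-byte elements
   may yield RTL of the form
     (plus (mult (reg i) (const_int 4)) (symbol_ref "a"))
   instead of forcing the sum into a pseudo register, so a caller that
   is computing an address can fold it into an addressing mode.  */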
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
                               enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
                  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
        last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    {
      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
    }

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
            REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                REG_NOTES (insn));
        }
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;

  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (MEM_P (temp))
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
          || code == INDIRECT_REF || code == BUFFER_REF)
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                            modifier);
      else if (code == BIT_FIELD_REF)
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
    target = 0;
  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        temp = label_rtx (exp);
        temp = gen_rtx_LABEL_REF (Pmode, temp);

        if (function != current_function_decl
            && function != 0)
          LABEL_REF_NONLOCAL_P (temp) = 1;

        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
        return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
        {
          error ("%Jprior parameter's size depends on '%D'", exp, exp);
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable is marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      if (context != 0 && context != current_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (MEM_P (DECL_RTL (exp))
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          lang_hooks.mark_addressable (exp);
          if (!MEM_P (DECL_RTL (exp)))
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (MEM_P (addr))
            addr
              = replace_equiv_address (addr,
                                       fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = replace_equiv_address (DECL_RTL (exp), addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (MEM_P (DECL_RTL (exp))
               && REG_P (XEXP (DECL_RTL (exp), 0)))
        temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (MEM_P (DECL_RTL (exp))
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && !REG_P (XEXP (DECL_RTL (exp), 0)))))
        {
          if (alt_rtl)
            *alt_rtl = DECL_RTL (exp);
          temp = replace_equiv_address (DECL_RTL (exp),
                                        copy_rtx (XEXP (DECL_RTL (exp), 0)));
        }

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (REG_P (DECL_RTL (exp))
          && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp,
                               (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
            abort ();

          temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return DECL_RTL (exp);
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
         which can result in (plus xx (const_int 0)), which can get
         simplified by validate_replace_rtx during virtual register
         instantiation, which can result in unrecognizable insns.
         Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
          && modifier != EXPAND_INITIALIZER)
        temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
        {
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
          rtx rtarg, itarg;

          rtarg = XEXP (original_target, 0);
          itarg = XEXP (original_target, 1);

          /* Move the real and imaginary parts separately.  */
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

          if (op0 != rtarg)
            emit_move_insn (rtarg, op0);
          if (op1 != itarg)
            emit_move_insn (itarg, op1);

          return original_target;
        }

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (temp, 0))
              || flag_force_addr))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;
= TREE_OPERAND (exp
, 0);
6731 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
6733 if (TREE_CODE (val
) != VAR_DECL
|| !DECL_ARTIFICIAL (val
))
6735 /* We can indeed still hit this case, typically via builtin
6736 expanders calling save_expr immediately before expanding
6737 something. Assume this means that we only have to deal
6738 with non-BLKmode values. */
6739 if (GET_MODE (ret
) == BLKmode
)
6742 val
= build_decl (VAR_DECL
, NULL
, TREE_TYPE (exp
));
6743 DECL_ARTIFICIAL (val
) = 1;
6744 TREE_OPERAND (exp
, 0) = val
;
6746 if (!CONSTANT_P (ret
))
6747 ret
= copy_to_reg (ret
);
6748 SET_DECL_RTL (val
, ret
);
6757 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6758 TREE_OPERAND (exp
, 0)
6759 = lang_hooks
.unsave_expr_now (TREE_OPERAND (exp
, 0));
6764 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6765 expand_goto (TREE_OPERAND (exp
, 0));
6767 expand_computed_goto (TREE_OPERAND (exp
, 0));
      /* These are lowered during gimplification, so we should never ever
         see them here.  */
    case LOOP_EXPR:
    case EXIT_EXPR:
      abort ();

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;
    case BIND_EXPR:
      {
        tree block = BIND_EXPR_BLOCK (exp);
        int mark_ends;

        /* If we're in functions-as-trees mode, this BIND_EXPR represents
           the block, so we need to emit NOTE_INSN_BLOCK_* notes.  */
        mark_ends = (block != NULL_TREE);
        expand_start_bindings_and_block (mark_ends ? 0 : 2, block);

        /* If VARS have not yet been expanded, expand them now.  */
        expand_vars (BIND_EXPR_VARS (exp));

        /* TARGET was clobbered early in this function.  The correct
           indicator of whether or not we need the value of this
           expression is the IGNORE variable.  */
        temp = expand_expr (BIND_EXPR_BODY (exp),
                            ignore ? const0_rtx : target,
                            tmode, modifier);

        expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);

        return temp;
      }

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

          return const0_rtx;
        }
      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.

         FIXME: Avoid trying to fill vector constructors piece-meal.
         Output them with output_constant_def below unless we're sure
         they're zeros.  This should go away when vector initializers
         are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ! mostly_zeros_p (exp))))
               || ((modifier == EXPAND_INITIALIZER
                    || modifier == EXPAND_CONST_ADDRESS)
                   && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp, 1);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM)
            constructor = validize_mem (constructor);

          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL
              || modifier == EXPAND_STACK_PARM)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             0, TREE_ADDRESSABLE (exp), 1);

          store_constructor (exp, target, 0, int_expr_size (exp));
          return target;
        }
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);

        if (modifier != EXPAND_WRITE)
          {
            tree t;

            t = fold_read_from_constant_string (exp);
            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);

        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_WRITE && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;
      }
    case ARRAY_REF:

#ifdef ENABLE_CHECKING
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();
#endif

      {
        tree array = TREE_OPERAND (exp, 0);
        tree low_bound = array_ref_low_bound (exp);
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));
        HOST_WIDE_INT i;

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY)
          {
            tree t = fold_read_from_constant_string (exp);

            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == CONSTRUCTOR
            && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST
            && 0 > compare_tree_int (index,
                                     list_length (CONSTRUCTOR_ELTS
                                                  (TREE_OPERAND (exp, 0)))))
          {
            tree elem;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                 i = TREE_INT_CST_LOW (index);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
                                  modifier);
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && modifier != EXPAND_MEMORY
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
                 && targetm.binds_local_p (array))
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         (elem
                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && 0 > compare_tree_int (index,
                                                  TREE_STRING_LENGTH (init)))
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return gen_int_mode (TREE_STRING_POINTER (init)
                                           [TREE_INT_CST_LOW (index)], mode);
                  }
              }
          }
      }
      goto normal_inner_ref;
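      /* Example (illustrative, not from the original source): for
         "foo"[2] the string fold above yields the character constant
         'o' directly, and for a constant index into a readonly array
         such as arr[1], where arr is a const VAR_DECL initialized to
         {10, 20, 30}, the CONSTRUCTOR scan returns 20 without ever
         materializing the array in memory.  */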
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    && modifier == EXPAND_STACK_PARM)
                  target = 0;
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
                    enum machine_mode imode
                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

                    if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      goto normal_inner_ref;
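      /* Worked example (illustrative, not from the original source):
         extracting a 3-bit bitfield whose CONSTRUCTOR value was expanded
         in SImode.  For an unsigned field the code above masks with
         (1 << 3) - 1 = 7; for a signed field it shifts left by
         GET_MODE_BITSIZE (SImode) - 3 = 29 and then arithmetic-shifts
         right by 29, reproducing the sign extension that storing through
         the constructor would have performed implicitly.  */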
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);
        rtx orig_op0;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to be able to hold it.  This occurs in unchecked conversion in
           Ada.  */
        orig_op0 = op0
          = expand_expr (tem,
                         (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                          && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                              != INTEGER_CST)
                          && modifier != EXPAND_STACK_PARM
                          ? target : NULL_RTX),
                         VOIDmode,
                         (modifier == EXPAND_INITIALIZER
                          || modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_STACK_PARM)
                         ? modifier : EXPAND_NORMAL);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        /* Otherwise, if this object is not in memory and we either have an
           offset or a BLKmode result, put it there.  This case can't occur in
           C, but can in Ada if we have unchecked conversion of an expression
           from a scalar type to an array or record type or for an
           ARRAY_RANGE_REF whose type is BLKmode.  */
        else if (!MEM_P (op0)
                 && (offset != 0
                     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
          {
            tree nt = build_qualified_type (TREE_TYPE (tem),
                                            (TYPE_QUALS (TREE_TYPE (tem))
                                             | TYPE_QUAL_CONST));
            rtx memloc = assign_temp (nt, 1, 1, 1);

            emit_move_insn (memloc, op0);
            op0 = memloc;
          }
        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                          EXPAND_SUM);

            if (!MEM_P (op0))
              abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
            if (GET_MODE (offset_rtx) != Pmode)
              offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
            if (GET_MODE (offset_rtx) != ptr_mode)
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

            if (GET_MODE (op0) == BLKmode
                /* A constant address in OP0 can have VOIDmode, we must
                   not try to call force_reg in that case.  */
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (MEM_P (op0) && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }
        /* The following code doesn't handle CONCAT.
           Assume only bitpos == 0 can be used for CONCAT, due to
           one element arrays having the same mode as its element.  */
        if (GET_CODE (op0) == CONCAT)
          {
            if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
              abort ();
            op0 = XEXP (op0, 0);
          }
        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
                      || (MEM_P (op0)
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
                     && ((modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_INITIALIZER)
                         ? STRICT_ALIGNMENT
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
                    == INTEGER_CST)
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && MEM_P (target)
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                if (bitsize == 0)
                  return target;

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (!MEM_P (op0)
                    || (target != 0 && !MEM_P (target))
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                emit_block_move (target,
                                 adjust_address (op0, VOIDmode,
                                                 bitpos / BITS_PER_UNIT),
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }

            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  It's tempting to make
               this a constant type, since we know it's only being stored once,
               but that can cause problems if we are taking the address of this
               COMPONENT_REF because the MEM of any reference via that address
               will have flags corresponding to the type, which will not
               necessarily be constant.  */
            if (mode == BLKmode)
              {
                rtx new
                  = assign_stack_temp_for_type
                    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                set_mem_attributes (op0, exp, 1);
              }

            return op0;
          }
        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */

        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
              && TREE_CODE (set_low_bound) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, op1);

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }

    case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
        {
          WITH_CLEANUP_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
                                  CLEANUP_EH_ONLY (exp));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 1) = 0;
        }
      return WITH_CLEANUP_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return lang_hooks.expand_expr (exp, original_target,
                                           tmode, modifier, alt_rtl);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attribute.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            {
              if (TYPE_MODE (type) != BLKmode)
                target = gen_reg_rtx (TYPE_MODE (type));
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (MEM_P (target))
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM ? 2 : 0);

          else if (REG_P (target))
            /* Store this field into a union of the proper type.  */
            store_field (target,
                         MIN ((int_size_in_bytes (TREE_TYPE
                                                  (TREE_OPERAND (exp, 0)))
                               * BITS_PER_UNIT),
                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, type, 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
          enum machine_mode inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            return simplify_gen_subreg (mode, op0, inner_mode,
                                        subreg_lowpart_offset (mode,
                                                               inner_mode));
          else
            return convert_modes (mode, inner_mode, op0,
                                  TYPE_UNSIGNED (inner_type));
        }

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
         Otherwise, if neither mode is BLKmode and both are integral and within
         a word, we can use gen_lowpart.  If neither is true, make sure the
         operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
               && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
               && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
               && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
        op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          if (TREE_ADDRESSABLE (exp))
            abort ();

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* At this point, OP0 is in the correct mode.  If the output type is such
         that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (MEM_P (op0))
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              if (TREE_ADDRESSABLE (exp))
                abort ();

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;
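      /* Example (illustrative, not from the original source):
         view-converting an SImode int to a same-sized SFmode float
         cannot use gen_lowpart here, since SFmode is not MODE_INT, so
         the operand is forced into memory and re-read in the new mode;
         a view-conversion whose source and destination share the same
         mode falls through the first branch unchanged.  */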
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;

      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
          && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }
      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         index.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Return a PLUS if modifier says it's OK.  */
                  if (modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    return simplify_gen_binary (PLUS, mode, op0, op1);
                  goto binop2;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           subtarget, &op0, &op1, 0);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);
      return simplify_gen_binary (PLUS, mode, op0, op1);
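      /* Example (illustrative, not from the original source): for
         P = &ARR[3] with 4-byte elements, the plus_constant path above
         folds the offset so that the address expands to
           (plus (symbol_ref "ARR") (const_int 12))
         rather than to an explicit add instruction whose result is then
         used as the address.  */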
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           NULL_RTX, &op0, &op1, modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }

      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
        {
          op1 = negate_rtx (mode, op1);
          return simplify_gen_binary (PLUS, mode, op0, op1);
        }

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && host_integerp (TREE_OPERAND (exp, 1), 0))
        {
          tree exp1 = TREE_OPERAND (exp, 1);

          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          if (!REG_P (op0))
            op0 = force_operand (op0, NULL_RTX);
          if (!REG_P (op0))
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx_MULT (mode, op0,
                               gen_int_mode (tree_low_cst (exp1, 0),
                                             TYPE_MODE (TREE_TYPE (exp1))));
        }

      if (modifier == EXPAND_STACK_PARM)
        target = 0;
7874 /* Check for multiplying things that have been extended
7875 from a narrower type. If this machine supports multiplying
7876 in that narrower type with a result in the desired type,
7877 do it that way, and avoid the explicit type-conversion. */
7878 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
7879 && TREE_CODE (type
) == INTEGER_TYPE
7880 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7881 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7882 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7883 && int_fits_type_p (TREE_OPERAND (exp
, 1),
7884 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7885 /* Don't use a widening multiply if a shift will do. */
7886 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
7887 > HOST_BITS_PER_WIDE_INT
)
7888 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
7890 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7891 && (TYPE_PRECISION (TREE_TYPE
7892 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7893 == TYPE_PRECISION (TREE_TYPE
7895 (TREE_OPERAND (exp
, 0), 0))))
7896 /* If both operands are extended, they must either both
7897 be zero-extended or both be sign-extended. */
7898 && (TYPE_UNSIGNED (TREE_TYPE
7899 (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
7900 == TYPE_UNSIGNED (TREE_TYPE
7902 (TREE_OPERAND (exp
, 0), 0)))))))
7904 tree op0type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0));
7905 enum machine_mode innermode
= TYPE_MODE (op0type
);
7906 bool zextend_p
= TYPE_UNSIGNED (op0type
);
7907 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
7908 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
7910 if (mode
== GET_MODE_WIDER_MODE (innermode
))
7912 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
7914 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7915 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7916 TREE_OPERAND (exp
, 1),
7917 NULL_RTX
, &op0
, &op1
, 0);
7919 expand_operands (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7920 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7921 NULL_RTX
, &op0
, &op1
, 0);
7924 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
7925 && innermode
== word_mode
)
7928 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
7929 NULL_RTX
, VOIDmode
, 0);
7930 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
7931 op1
= convert_modes (innermode
, mode
,
7932 expand_expr (TREE_OPERAND (exp
, 1),
7933 NULL_RTX
, VOIDmode
, 0),
7936 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
7937 NULL_RTX
, VOIDmode
, 0);
7938 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
7939 unsignedp
, OPTAB_LIB_WIDEN
);
7940 hipart
= gen_highpart (innermode
, temp
);
7941 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
7945 emit_move_insn (hipart
, htem
);
7950 expand_operands (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1),
7951 subtarget
, &op0
, &op1
, 0);
7952 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
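      /* Illustrative aside, not GCC source: the source-level pattern the
	 widening-multiply detection above looks for.  When both operands
	 are extended from the same narrower type, one 32x32->64 widening
	 multiply computes the exact product, so no 64x64 multiply is
	 needed.  Stand-alone hypothetical sketch.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint64_t
widening_mult (uint32_t x, uint32_t y)
{
  /* Both operands are zero-extended from the same narrower type, so a
     single unsigned widening multiply yields the exact 64-bit product.  */
  return (uint64_t) x * (uint64_t) y;
}

int
main (void)
{
  assert (widening_mult (0xffffffffu, 2) == 0x1fffffffeULL);
  return 0;
}
#endif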
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;
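      /* Illustrative aside, not GCC source: why the a/b -> a*(1/b) rewrite
	 is gated on flag_unsafe_math_optimizations.  The reciprocal is
	 rounded once and the multiply rounds again, so the result can
	 differ from a correctly rounded division in the last ulp.
	 Stand-alone hypothetical sketch.  */
#if 0
#include <stdio.h>

int
main (void)
{
  double a = 1.0, b = 49.0;
  double exact = a / b;		/* one rounding */
  double recip = a * (1.0 / b);	/* two roundings */
  /* The two results may differ by one ulp; that is the precision loss
     the flag accepts in exchange for CSEing the reciprocal.  */
  printf ("equal: %d\n", exact == recip);
  return 0;
}
#endif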
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS(mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	abort ();

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
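      /* Illustrative aside, not GCC source: one branchless sequence
	 expand_abs can produce for signed integers when no abs
	 instruction exists: broadcast the sign into a mask, then xor and
	 subtract.  Assumes an arithmetic right shift of a negative value,
	 which is implementation-defined in ISO C but holds on the targets
	 this technique is used for.  Stand-alone hypothetical sketch.  */
#if 0
#include <assert.h>
#include <stdint.h>

static int32_t
branchless_abs (int32_t x)
{
  int32_t mask = x >> 31;	/* 0 if x >= 0, -1 if x < 0 */
  return (x ^ mask) - mask;
}

int
main (void)
{
  assert (branchless_abs (-5) == 5 && branchless_abs (7) == 7);
  return 0;
}
#endif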
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (unsignedp
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
      if (MEM_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
					  NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
					  NULL_RTX, op0);
	}
      else
	do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				 unsignedp, mode, NULL_RTX, NULL_RTX, op0);

      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
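      /* Illustrative aside, not GCC source: comparing an integer wider
	 than the machine can compare in one insn, word by word from the
	 most significant end, the way do_jump_by_parts_greater_rtx
	 proceeds.  Stand-alone hypothetical sketch.  */
#if 0
#include <assert.h>
#include <stdint.h>

/* Compare two 2-word unsigned values; high words decide unless they
   tie, in which case the low words are compared.  */
static int
wide_less (uint32_t a_hi, uint32_t a_lo, uint32_t b_hi, uint32_t b_lo)
{
  if (a_hi != b_hi)
    return a_hi < b_hi;
  return a_lo < b_lo;
}

int
main (void)
{
  assert (wide_less (0, 5, 1, 0));
  assert (!wide_less (1, 0, 0, 5));
  return 0;
}
#endif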
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
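      /* Illustrative aside, not GCC source: the "load foo, and if it is
	 nonzero load 1" shape emitted above, written at the source level.
	 The compare-and-jump skips the move when temp is already zero.
	 Stand-alone hypothetical sketch.  */
#if 0
#include <assert.h>

static int
is_nonzero (int foo)
{
  int temp = foo;		/* expand_expr of the operand */
  if (temp != 0)		/* emit_cmp_and_jump_insns vs. const0_rtx */
    temp = 1;			/* emit_move_insn (temp, const1_rtx) */
  return temp;
}

int
main (void)
{
  assert (is_nonzero (42) == 1 && is_nonzero (0) == 0);
  return 0;
}
#endif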
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
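      /* Illustrative aside, not GCC source: for operands known to be 0 or
	 1, logical negation is a single xor with 1, which is why
	 TRUTH_NOT_EXPR expands through xor_optab.  Stand-alone
	 hypothetical sketch.  */
#if 0
#include <assert.h>

int
main (void)
{
  assert ((0 ^ 1) == 1);	/* !0 */
  assert ((1 ^ 1) == 0);	/* !1 */
  return 0;
}
#endif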
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr_real (TREE_OPERAND (exp, 1),
			       (ignore ? const0_rtx : target),
			       VOIDmode, modifier, alt_rtl);
    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	if (!ignore)
	  abort ();

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
      /* If it's void, we don't need to worry about computing a value.  */
      if (VOID_TYPE_P (TREE_TYPE (exp)))
	{
	  tree pred = TREE_OPERAND (exp, 0);
	  tree then_ = TREE_OPERAND (exp, 1);
	  tree else_ = TREE_OPERAND (exp, 2);

	  /* If we do not have any pending cleanups or stack_levels
	     to restore, and at least one arm of the COND_EXPR is a
	     GOTO_EXPR to a local label, then we can emit more efficient
	     code by using jumpif/jumpifnot instead of the 'if' machinery.  */
	  if (! optimize
	      || containing_blocks_have_cleanups_or_stack_level ())
	    ;
	  else if (TREE_CODE (then_) == GOTO_EXPR
		   && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
	    {
	      jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
	      return expand_expr (else_, const0_rtx, VOIDmode, 0);
	    }
	  else if (TREE_CODE (else_) == GOTO_EXPR
		   && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
	    {
	      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
	      return expand_expr (then_, const0_rtx, VOIDmode, 0);
	    }

	  /* Just use the 'if' machinery.  */
	  expand_start_cond (pred, 0);
	  start_cleanup_deferral ();
	  expand_expr (then_, const0_rtx, VOIDmode, 0);

	  exp = else_;

	  /* Iterate over 'else if's instead of recursing.  */
	  for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
	    {
	      expand_start_else ();
	      if (EXPR_HAS_LOCATION (exp))
		{
		  emit_line_note (EXPR_LOCATION (exp));
		  record_block_change (TREE_BLOCK (exp));
		}
	      expand_elseif (TREE_OPERAND (exp, 0));
	      expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
	    }
	  /* Don't emit the jump and label if there's no 'else' clause.  */
	  if (TREE_SIDE_EFFECTS (exp))
	    {
	      expand_start_else ();
	      expand_expr (exp, const0_rtx, VOIDmode, 0);
	    }
	  end_cleanup_deferral ();
	  expand_end_cond ();
	  return const0_rtx;
	}
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}
      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    if (modifier == EXPAND_STACK_PARM)
	      target = 0;
	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8425 if a target was specified use it; it will not be used as an
8426 intermediate target unless it is safe. If no target, use a
8431 else if (modifier
== EXPAND_STACK_PARM
)
8432 temp
= assign_temp (type
, 0, 0, 1);
8433 else if (original_target
8434 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8435 || (singleton
&& REG_P (original_target
)
8436 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8437 && original_target
== var_rtx (singleton
)))
8438 && GET_MODE (original_target
) == mode
8439 #ifdef HAVE_conditional_move
8440 && (! can_conditionally_move_p (mode
)
8441 || REG_P (original_target
)
8442 || TREE_ADDRESSABLE (type
))
8444 && (!MEM_P (original_target
)
8445 || TREE_ADDRESSABLE (type
)))
8446 temp
= original_target
;
8447 else if (TREE_ADDRESSABLE (type
))
8450 temp
= assign_temp (type
, 0, 0, 1);
8452 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8453 do the test of X as a store-flag operation, do this as
8454 A + ((X != 0) << log C). Similarly for other simple binary
8455 operators. Only do for C == 1 if BRANCH_COST is low. */
8456 if (temp
&& singleton
&& binary_op
8457 && (TREE_CODE (binary_op
) == PLUS_EXPR
8458 || TREE_CODE (binary_op
) == MINUS_EXPR
8459 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8460 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8461 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8462 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8463 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8467 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8468 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8469 ? addv_optab
: add_optab
)
8470 : TREE_CODE (binary_op
) == MINUS_EXPR
8471 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8472 ? subv_optab
: sub_optab
)
8473 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8476 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8477 if (singleton
== TREE_OPERAND (exp
, 1))
8478 cond
= invert_truthvalue (TREE_OPERAND (exp
, 0));
8480 cond
= TREE_OPERAND (exp
, 0);
8482 result
= do_store_flag (cond
, (safe_from_p (temp
, singleton
, 1)
8484 mode
, BRANCH_COST
<= 1);
8486 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8487 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8488 build_int_2 (tree_log2
8492 (safe_from_p (temp
, singleton
, 1)
8493 ? temp
: NULL_RTX
), 0);
8497 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8498 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8499 unsignedp
, OPTAB_LIB_WIDEN
);
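	/* Illustrative aside, not GCC source: the branch-free rewrite
	   performed above.  For X ? A + C : A with C a power of 2, the
	   store-flag result (X != 0), shifted left by log2(C), is added
	   unconditionally.  Stand-alone hypothetical sketch with C == 4.  */
#if 0
#include <assert.h>

static int
cond_add_pow2 (int x, int a)
{
  /* X ? A + 4 : A, with log2(4) == 2.  */
  return a + ((x != 0) << 2);
}

int
main (void)
{
  assert (cond_add_pow2 (9, 10) == 14);
  assert (cond_add_pow2 (0, 10) == 10);
  return 0;
}
#endif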
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (REG_P (temp)
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (REG_P (temp)
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (REG_P (temp)
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.  For
	       example A ? throw : E  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }
    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		SET_DECL_RTL (slot, target);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2)
		    = lang_hooks.maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      SET_DECL_RTL (slot, target);
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	if (VOID_TYPE_P (TREE_TYPE (exp1)))
	  /* If the initializer is void, just expand it; it will initialize
	     the object directly.  */
	  expand_expr (exp1, const0_rtx, VOIDmode, 0);
	else
	  store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	return target;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy and that prevents a partial-subsumption
	   which makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore);

	return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* If we are taking the address of something erroneous, just
	 return zero.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      if (cfun == 0
	  && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	      || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		  == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));
	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (REG_P (op0) || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
		   || GET_CODE (op0) == LO_SUM)
	    {
	      /* If this object is in a register, it can't be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      if (GET_CODE (op0) == PARALLEL)
		/* Handle calls that pass values in multiple
		   non-contiguous locations.  The Irix 6 ABI has examples
		   of this.  */
		emit_group_store (memloc, op0, inner_type,
				  int_size_in_bytes (inner_type));
	      else
		emit_move_insn (memloc, op0);

	      op0 = memloc;
	    }

	  if (!MEM_P (op0))
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
	      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
	      return op0;
	    }

	  /* If OP0 is not aligned at least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

	      new = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		 : int_size_in_bytes (inner_type),
		 1, build_qualified_type (inner_type,
					  (TYPE_QUALS (inner_type)
					   | TYPE_QUAL_CONST)));

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && !REG_P (op0)
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (REG_P (op0)
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

      if (GET_MODE (op0) == Pmode && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS(partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	expand_eh_handler (handler);

	return op0;
      }

    case CATCH_EXPR:
      expand_start_catch (CATCH_TYPES (exp));
      expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
      expand_end_catch ();
      return const0_rtx;

    case EH_FILTER_EXPR:
      /* Should have been handled in expand_eh_handler.  */
      abort ();

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);

	if ((!optimize && lang_protect_cleanup_actions == NULL)
	    || unsafe_for_reeval (finally_block) > 1)
	  {
	    /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
	       is not sufficient, so we cannot expand the block twice.
	       So we play games with GOTO_SUBROUTINE_EXPR to let us
	       expand the thing only once.  */
	    /* When not optimizing, we go ahead with this form since
	       (1) user breakpoints operate more predictably without
		   code duplication, and
	       (2) we're not running any of the global optimizers
		   that would explode in time/space with the highly
		   connected CFG created by the indirect branching.  */

	    rtx finally_label = gen_label_rtx ();
	    rtx done_label = gen_label_rtx ();
	    rtx return_link = gen_reg_rtx (Pmode);
	    tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
				  (tree) finally_label, (tree) return_link);
	    TREE_SIDE_EFFECTS (cleanup) = 1;

	    /* Start a new binding layer that will keep track of all cleanup
	       actions to be performed.  */
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, cleanup);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	    emit_jump (done_label);
	    emit_label (finally_label);
	    expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	    emit_indirect_jump (return_link);
	    emit_label (done_label);
	  }
	else
	  {
	    expand_start_bindings (2);
	    target_temp_slot_level = temp_slot_level;

	    expand_decl_cleanup (NULL_TREE, finally_block);
	    op0 = expand_expr (try_block, target, tmode, modifier);

	    preserve_temp_slots (op0);
	    expand_end_bindings (NULL_TREE, 0, 0);
	  }

	return op0;
      }
    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();

	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    case SWITCH_EXPR:
      expand_start_case (0, SWITCH_COND (exp), integer_type_node,
			 "switch statement");
      if (SWITCH_BODY (exp))
	expand_expr_stmt (SWITCH_BODY (exp));
      if (SWITCH_LABELS (exp))
	{
	  tree duplicate = 0;
	  tree vec = SWITCH_LABELS (exp);
	  size_t i, n = TREE_VEC_LENGTH (vec);

	  for (i = 0; i < n; ++i)
	    {
	      tree elt = TREE_VEC_ELT (vec, i);
	      tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
	      tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
	      tree max_value = TYPE_MAX_VALUE (controlling_expr_type);

	      tree case_low = CASE_LOW (elt);
	      tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
	      if (case_low && case_high)
		{
		  /* Case label is less than minimum for type.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (case_low, min_value) < 0
		      && tree_int_cst_compare (case_high, min_value) < 0)
		    {
		      warning ("case label value %d is less than minimum value for type",
			       TREE_INT_CST (case_low));
		      continue;
		    }

		  /* Case value is greater than maximum for type.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (case_low, max_value) > 0
		      && tree_int_cst_compare (case_high, max_value) > 0)
		    {
		      warning ("case label value %d exceeds maximum value for type",
			       TREE_INT_CST (case_high));
		      continue;
		    }

		  /* Saturate lower case label value to minimum.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (case_high, min_value) >= 0
		      && tree_int_cst_compare (case_low, min_value) < 0)
		    {
		      warning ("lower value %d in case label range less than minimum value for type",
			       TREE_INT_CST (case_low));
		      case_low = min_value;
		    }

		  /* Saturate upper case label value to maximum.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (case_low, max_value) <= 0
		      && tree_int_cst_compare (case_high, max_value) > 0)
		    {
		      warning ("upper value %d in case label range exceeds maximum value for type",
			       TREE_INT_CST (case_high));
		      case_high = max_value;
		    }
		}

	      add_case_node (case_low, case_high, CASE_LABEL (elt),
			     &duplicate, true);
	    }
	}
      expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case CASE_LABEL_EXPR:
      {
	tree duplicate = 0;
	add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
		       &duplicate, false);
	if (duplicate)
	  abort ();
	return const0_rtx;
      }

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static bool
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
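}

/* Illustrative aside, not GCC source: the source-level idiom
   is_aligning_offset recognizes.  Adding (-(uintptr_t) &exp) & (align - 1)
   to the address of EXP rounds it up to the next multiple of align.
   Stand-alone hypothetical sketch; `align' is an example value.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  char buf[64];
  uintptr_t align = 16;
  char *p = buf + ((- (uintptr_t) buf) & (align - 1));

  assert (((uintptr_t) p % align) == 0 && p >= buf && p < buf + align);
  return 0;
}
#endif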
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
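/* Illustrative aside, not GCC source: the tree shapes string_constant
   accepts, shown as the C expressions that produce them.  Each denotes a
   byte inside a string literal, with the offset returned through
   *PTR_OFFSET.  Stand-alone hypothetical sketch.  */
#if 0
#include <assert.h>

int
main (void)
{
  const char *s = "hello";
  /* &"hello"[0]  -> offset 0  (plain ADDR_EXPR of a STRING_CST).  */
  /* &"hello"[2]  -> offset 2  (ADDR_EXPR of an ARRAY_REF).  */
  /* "hello" + 2  -> offset 2  (PLUS_EXPR with an ADDR_EXPR operand).  */
  assert (*(s + 2) == 'l' && s[2] == 'l');
  return 0;
}
#endif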
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (tree exp, int post, int ignore)
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queuing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode))
	{
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = replace_equiv_address (op0, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
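/* Illustrative aside, not GCC source: the fallback sequence at the end
   of expand_increment.  For a post-increment the old value is copied
   out first, the sum is formed, and the sum is stored back; the saved
   copy is what the expression yields.  Stand-alone hypothetical
   sketch.  */
#if 0
#include <assert.h>

int
main (void)
{
  int op0 = 5;			/* the lvalue */
  int temp = op0;		/* copy_to_reg: save the old value */
  int op1 = op0 + 1;		/* expand_binop: form the sum */
  op0 = op1;			/* emit_move_insn: store it back */

  assert (temp == 5 && op0 == 6);	/* i++ yields 5, leaves 6 */
  return 0;
}
#endif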
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
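  /* Illustrative aside, not GCC source: the shift-and-mask form
     fold_single_bit_test produces for (x & (1 << k)) != 0, avoiding a
     store-flag insn.  Stand-alone hypothetical sketch.  */
#if 0
#include <assert.h>

static int
bit_test_ne (unsigned x, int k)
{
  return (x >> k) & 1;		/* == ((x & (1u << k)) != 0) */
}

int
main (void)
{
  assert (bit_test_ne (0x14, 2) == 1);
  assert (bit_test_ne (0x14, 3) == 0);
  return 0;
}
#endif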
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
9815 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9816 0 otherwise (i.e. if there is no casesi instruction). */
9818 try_casesi (tree index_type
, tree index_expr
, tree minval
, tree range
,
9819 rtx table_label ATTRIBUTE_UNUSED
, rtx default_label
)
9821 enum machine_mode index_mode
= SImode
;
9822 int index_bits
= GET_MODE_BITSIZE (index_mode
);
9823 rtx op1
, op2
, index
;
9824 enum machine_mode op_mode
;
9829 /* Convert the index to SImode. */
9830 if (GET_MODE_BITSIZE (TYPE_MODE (index_type
)) > GET_MODE_BITSIZE (index_mode
))
9832 enum machine_mode omode
= TYPE_MODE (index_type
);
9833 rtx rangertx
= expand_expr (range
, NULL_RTX
, VOIDmode
, 0);
9835 /* We must handle the endpoints in the original mode. */
9836 index_expr
= build (MINUS_EXPR
, index_type
,
9837 index_expr
, minval
);
9838 minval
= integer_zero_node
;
9839 index
= expand_expr (index_expr
, NULL_RTX
, VOIDmode
, 0);
9840 emit_cmp_and_jump_insns (rangertx
, index
, LTU
, NULL_RTX
,
9841 omode
, 1, default_label
);
9842 /* Now we can safely truncate. */
9843 index
= convert_to_mode (index_mode
, index
, 0);
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
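  /* Either way INDEX now fits in index_mode: it was either narrow enough
     to begin with, or the LTU check above has already routed any
     out-of-range value to DEFAULT_LABEL before the truncation.  */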
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);
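  /* This pattern repeats for each casesi operand below: validate the
     expanded rtx against the predicate recorded in insn_data, and fall
     back to copying it into a fresh register of the operand's mode,
     which the predicate is expected to accept.  */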
  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);
  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);
  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
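  /* For example, if the case values run from 10 to 15, the lower bound 10
     has already been subtracted and RANGE is 5.  An original index of 8
     leaves (unsigned) (8 - 10), a value far greater than 5, so the single
     GTU branch catches values below the minimum as well as values above
     the maximum.  */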
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
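  /* The rtx built above computes table_label + index * entry_size,
     i.e. the address of entry INDEX in the dispatch table.  */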
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;
  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
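  /* INDEX now holds the zero-based offset into the table, so do_tablejump
     below only has to range-check it against RANGE and dispatch.  */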
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);
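  /* (CONST0_RTX yields the shared all-zeros constant for MODE, so an
     all-zeros VECTOR_CST never needs an rtvec built for it.)  */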
  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"