1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
/* CEIL(x,y): smallest integer >= x/y for nonnegative x and positive y,
   i.e. the number of size-Y units needed to cover X.  */
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
40 #ifdef STACK_GROWS_DOWNWARD
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
50 #define STACK_PUSH_CODE PRE_INC
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
/* (STACK_BOUNDARY is a bit count; dividing by BITS_PER_UNIT converts
   it to the stack alignment in bytes.)  */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls
= 1;
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust
;
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop
;
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call
;
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
returned.  NOTE(review): the original closing of this comment (source
line 86) is missing from this extraction — wording reconstructed; verify
against the upstream file.  */
87 static rtx saveregs_value
;
/* Forward declarations of this file's internal helpers (pre-ANSI style:
   no prototypes, so parameter lists are empty).
   NOTE(review): the embedded source numbering jumps 97 -> 99 below, so one
   declaration from the original file is missing in this extraction.  */
90 static void store_constructor ();
91 static rtx
store_field ();
92 static rtx
expand_builtin ();
93 static rtx
compare ();
94 static rtx
do_store_flag ();
95 static void preexpand_calls ();
96 static rtx
expand_increment ();
97 static void init_queue ();
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
/* direct_load[(int) m] is nonzero when a memory-to-register move in mode
   m is recognized by the target (set up once by probing recog ()
   elsewhere in this file).  */
109 static char direct_load
[NUM_MACHINE_MODES
];
/* direct_store[(int) m]: same, for register-to-memory moves.  */
110 static char direct_store
[NUM_MACHINE_MODES
];
112 /* MOVE_RATIO is the number of move instructions that is better than
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
119 /* A value of around 6 would minimize code size; infinity would minimize
121 #define MOVE_RATIO 15
125 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
127 #ifndef SLOW_UNALIGNED_ACCESS
128 #define SLOW_UNALIGNED_ACCESS 0
131 /* This is run once per compilation to set up which modes can be used
132 directly in memory. */
138 enum machine_mode mode
;
139 rtx mem
= gen_rtx (MEM
, VOIDmode
, stack_pointer_rtx
);
142 insn
= emit_insn (gen_rtx (SET
, 0, 0));
143 pat
= PATTERN (insn
);
145 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
146 mode
= (enum machine_mode
) ((int) mode
+ 1))
152 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
153 PUT_MODE (mem
, mode
);
155 /* Find a register that can be used in this mode, if any. */
156 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
157 if (HARD_REGNO_MODE_OK (regno
, mode
))
160 if (regno
== FIRST_PSEUDO_REGISTER
)
163 reg
= gen_rtx (REG
, mode
, regno
);
166 SET_DEST (pat
) = reg
;
167 direct_load
[(int) mode
] = (recog (pat
, insn
, &num_clobbers
)) >= 0;
170 SET_DEST (pat
) = mem
;
171 direct_store
[(int) mode
] = (recog (pat
, insn
, &num_clobbers
)) >= 0;
177 /* This is run at the start of compiling a function. */
184 pending_stack_adjust
= 0;
185 inhibit_defer_pop
= 0;
186 cleanups_this_call
= 0;
191 /* Save all variables describing the current status into the structure *P.
192 This is used before starting a nested function. */
198 /* Instead of saving the postincrement queue, empty it. */
201 p
->pending_stack_adjust
= pending_stack_adjust
;
202 p
->inhibit_defer_pop
= inhibit_defer_pop
;
203 p
->cleanups_this_call
= cleanups_this_call
;
204 p
->saveregs_value
= saveregs_value
;
205 p
->forced_labels
= forced_labels
;
207 pending_stack_adjust
= 0;
208 inhibit_defer_pop
= 0;
209 cleanups_this_call
= 0;
214 /* Restore all variables describing the current status from the structure *P.
215 This is used after a nested function. */
218 restore_expr_status (p
)
221 pending_stack_adjust
= p
->pending_stack_adjust
;
222 inhibit_defer_pop
= p
->inhibit_defer_pop
;
223 cleanups_this_call
= p
->cleanups_this_call
;
224 saveregs_value
= p
->saveregs_value
;
225 forced_labels
= p
->forced_labels
;
228 /* Manage the queue of increment instructions to be output
229 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the chain of not-yet-emitted QUEUED rtxs; enqueue_insn pushes
   onto it and emit_queue drains it.  Zero when the queue is empty.  */
231 static rtx pending_chain
;
233 /* Queue up to increment (or change) VAR later. BODY says how:
234 BODY should be the same thing you would pass to emit_insn
235 to increment right away. It will go to emit_insn later on.
237 The value is a QUEUED expression to be used in place of VAR
238 where you want to guarantee the pre-incrementation value of VAR. */
241 enqueue_insn (var
, body
)
244 pending_chain
= gen_rtx (QUEUED
, GET_MODE (var
),
245 var
, NULL_RTX
, NULL_RTX
, body
, pending_chain
);
246 return pending_chain
;
249 /* Use protect_from_queue to convert a QUEUED expression
250 into something that you can put immediately into an instruction.
251 If the queued incrementation has not happened yet,
252 protect_from_queue returns the variable itself.
253 If the incrementation has happened, protect_from_queue returns a temp
254 that contains a copy of the old value of the variable.
256 Any time an rtx which might possibly be a QUEUED is to be put
257 into an instruction, it must be passed through protect_from_queue first.
258 QUEUED expressions are not meaningful in instructions.
260 Do not pass a value through protect_from_queue and then hold
261 on to it for a while before putting it in an instruction!
262 If the queue is flushed in between, incorrect code will result. */
265 protect_from_queue (x
, modify
)
269 register RTX_CODE code
= GET_CODE (x
);
271 #if 0 /* A QUEUED can hang around after the queue is forced out. */
272 /* Shortcut for most common case. */
273 if (pending_chain
== 0)
279 /* A special hack for read access to (MEM (QUEUED ...))
280 to facilitate use of autoincrement.
281 Make a copy of the contents of the memory location
282 rather than a copy of the address, but not
283 if the value is of mode BLKmode. */
284 if (code
== MEM
&& GET_MODE (x
) != BLKmode
285 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
287 register rtx y
= XEXP (x
, 0);
288 XEXP (x
, 0) = QUEUED_VAR (y
);
291 register rtx temp
= gen_reg_rtx (GET_MODE (x
));
292 emit_insn_before (gen_move_insn (temp
, x
),
298 /* Otherwise, recursively protect the subexpressions of all
299 the kinds of rtx's that can contain a QUEUED. */
301 XEXP (x
, 0) = protect_from_queue (XEXP (x
, 0), 0);
302 else if (code
== PLUS
|| code
== MULT
)
304 XEXP (x
, 0) = protect_from_queue (XEXP (x
, 0), 0);
305 XEXP (x
, 1) = protect_from_queue (XEXP (x
, 1), 0);
309 /* If the increment has not happened, use the variable itself. */
310 if (QUEUED_INSN (x
) == 0)
311 return QUEUED_VAR (x
);
312 /* If the increment has happened and a pre-increment copy exists,
314 if (QUEUED_COPY (x
) != 0)
315 return QUEUED_COPY (x
);
316 /* The increment has happened but we haven't set up a pre-increment copy.
317 Set one up now, and use it. */
318 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
319 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
321 return QUEUED_COPY (x
);
324 /* Return nonzero if X contains a QUEUED expression:
325 if it contains anything that will be altered by a queued increment.
326 We handle only combinations of MEM, PLUS, MINUS and MULT operators
327 since memory addresses generally contain only those. */
333 register enum rtx_code code
= GET_CODE (x
);
339 return queued_subexp_p (XEXP (x
, 0));
343 return queued_subexp_p (XEXP (x
, 0))
344 || queued_subexp_p (XEXP (x
, 1));
349 /* Perform all the pending incrementations. */
355 while (p
= pending_chain
)
357 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
358 pending_chain
= QUEUED_NEXT (p
);
369 /* Copy data from FROM to TO, where the machine modes are not the same.
370 Both modes may be integer, or both may be floating.
371 UNSIGNEDP should be nonzero if FROM is an unsigned type.
372 This causes zero-extension instead of sign-extension. */
375 convert_move (to
, from
, unsignedp
)
376 register rtx to
, from
;
379 enum machine_mode to_mode
= GET_MODE (to
);
380 enum machine_mode from_mode
= GET_MODE (from
);
381 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
382 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
386 /* rtx code for making an equivalent value. */
387 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
389 to
= protect_from_queue (to
, 1);
390 from
= protect_from_queue (from
, 0);
392 if (to_real
!= from_real
)
395 if (to_mode
== from_mode
396 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
398 emit_move_insn (to
, from
);
404 #ifdef HAVE_extendsfdf2
405 if (HAVE_extendsfdf2
&& from_mode
== SFmode
&& to_mode
== DFmode
)
407 emit_unop_insn (CODE_FOR_extendsfdf2
, to
, from
, UNKNOWN
);
411 #ifdef HAVE_extendsfxf2
412 if (HAVE_extendsfxf2
&& from_mode
== SFmode
&& to_mode
== XFmode
)
414 emit_unop_insn (CODE_FOR_extendsfxf2
, to
, from
, UNKNOWN
);
418 #ifdef HAVE_extendsftf2
419 if (HAVE_extendsftf2
&& from_mode
== SFmode
&& to_mode
== TFmode
)
421 emit_unop_insn (CODE_FOR_extendsftf2
, to
, from
, UNKNOWN
);
425 #ifdef HAVE_extenddfxf2
426 if (HAVE_extenddfxf2
&& from_mode
== DFmode
&& to_mode
== XFmode
)
428 emit_unop_insn (CODE_FOR_extenddfxf2
, to
, from
, UNKNOWN
);
432 #ifdef HAVE_extenddftf2
433 if (HAVE_extenddftf2
&& from_mode
== DFmode
&& to_mode
== TFmode
)
435 emit_unop_insn (CODE_FOR_extenddftf2
, to
, from
, UNKNOWN
);
439 #ifdef HAVE_truncdfsf2
440 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
442 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
446 #ifdef HAVE_truncxfsf2
447 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
449 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
453 #ifdef HAVE_trunctfsf2
454 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
456 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
460 #ifdef HAVE_truncxfdf2
461 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
463 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
467 #ifdef HAVE_trunctfdf2
468 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
470 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
482 libcall
= extendsfdf2_libfunc
;
486 libcall
= extendsfxf2_libfunc
;
490 libcall
= extendsftf2_libfunc
;
499 libcall
= truncdfsf2_libfunc
;
503 libcall
= extenddfxf2_libfunc
;
507 libcall
= extenddftf2_libfunc
;
516 libcall
= truncxfsf2_libfunc
;
520 libcall
= truncxfdf2_libfunc
;
529 libcall
= trunctfsf2_libfunc
;
533 libcall
= trunctfdf2_libfunc
;
539 if (libcall
== (rtx
) 0)
540 /* This conversion is not implemented yet. */
543 emit_library_call (libcall
, 1, to_mode
, 1, from
, from_mode
);
544 emit_move_insn (to
, hard_libcall_value (to_mode
));
548 /* Now both modes are integers. */
550 /* Handle expanding beyond a word. */
551 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
552 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
559 enum machine_mode lowpart_mode
;
560 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
562 /* Try converting directly if the insn is supported. */
563 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
566 emit_unop_insn (code
, to
, from
, equiv_code
);
569 /* Next, try converting via full word. */
570 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
571 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
572 != CODE_FOR_nothing
))
574 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
575 emit_unop_insn (code
, to
,
576 gen_lowpart (word_mode
, to
), equiv_code
);
580 /* No special multiword conversion insn; do it by hand. */
583 /* Get a copy of FROM widened to a word, if necessary. */
584 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
585 lowpart_mode
= word_mode
;
587 lowpart_mode
= from_mode
;
589 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
591 lowpart
= gen_lowpart (lowpart_mode
, to
);
592 emit_move_insn (lowpart
, lowfrom
);
594 /* Compute the value to put in each remaining word. */
596 fill_value
= const0_rtx
;
601 && insn_operand_mode
[(int) CODE_FOR_slt
][0] == word_mode
602 && STORE_FLAG_VALUE
== -1)
604 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
606 fill_value
= gen_reg_rtx (word_mode
);
607 emit_insn (gen_slt (fill_value
));
613 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
614 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
616 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
620 /* Fill the remaining words. */
621 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
623 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
624 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
629 if (fill_value
!= subword
)
630 emit_move_insn (subword
, fill_value
);
633 insns
= get_insns ();
636 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
637 gen_rtx (equiv_code
, to_mode
, from
));
641 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
)
643 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
647 /* Handle pointer conversion */ /* SPEE 900220 */
648 if (to_mode
== PSImode
)
650 if (from_mode
!= SImode
)
651 from
= convert_to_mode (SImode
, from
, unsignedp
);
653 #ifdef HAVE_truncsipsi
656 emit_unop_insn (CODE_FOR_truncsipsi
, to
, from
, UNKNOWN
);
659 #endif /* HAVE_truncsipsi */
663 if (from_mode
== PSImode
)
665 if (to_mode
!= SImode
)
667 from
= convert_to_mode (SImode
, from
, unsignedp
);
672 #ifdef HAVE_extendpsisi
673 if (HAVE_extendpsisi
)
675 emit_unop_insn (CODE_FOR_extendpsisi
, to
, from
, UNKNOWN
);
678 #endif /* HAVE_extendpsisi */
683 /* Now follow all the conversions between integers
684 no more than a word long. */
686 /* For truncation, usually we can just refer to FROM in a narrower mode. */
687 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
688 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
689 GET_MODE_BITSIZE (from_mode
))
690 && ((GET_CODE (from
) == MEM
691 && ! MEM_VOLATILE_P (from
)
692 && direct_load
[(int) to_mode
]
693 && ! mode_dependent_address_p (XEXP (from
, 0)))
694 || GET_CODE (from
) == REG
695 || GET_CODE (from
) == SUBREG
))
697 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
701 /* For truncation, usually we can just refer to FROM in a narrower mode. */
702 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
704 /* Convert directly if that works. */
705 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
708 /* If FROM is a SUBREG, put it into a register. Do this
709 so that we always generate the same set of insns for
710 better cse'ing; if an intermediate assignment occurred,
711 we won't be doing the operation directly on the SUBREG. */
712 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
713 from
= force_reg (from_mode
, from
);
714 emit_unop_insn (code
, to
, from
, equiv_code
);
719 enum machine_mode intermediate
;
721 /* Search for a mode to convert via. */
722 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
723 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
724 if ((can_extend_p (to_mode
, intermediate
, unsignedp
)
726 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
727 != CODE_FOR_nothing
))
729 convert_move (to
, convert_to_mode (intermediate
, from
,
730 unsignedp
), unsignedp
);
734 /* No suitable intermediate mode. */
739 /* Support special truncate insns for certain modes. */
741 if (from_mode
== DImode
&& to_mode
== SImode
)
743 #ifdef HAVE_truncdisi2
746 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
750 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
754 if (from_mode
== DImode
&& to_mode
== HImode
)
756 #ifdef HAVE_truncdihi2
759 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
763 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
767 if (from_mode
== DImode
&& to_mode
== QImode
)
769 #ifdef HAVE_truncdiqi2
772 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
776 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
780 if (from_mode
== SImode
&& to_mode
== HImode
)
782 #ifdef HAVE_truncsihi2
785 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
789 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
793 if (from_mode
== SImode
&& to_mode
== QImode
)
795 #ifdef HAVE_truncsiqi2
798 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
802 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
806 if (from_mode
== HImode
&& to_mode
== QImode
)
808 #ifdef HAVE_trunchiqi2
811 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
815 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
819 /* Handle truncation of volatile memrefs, and so on;
820 the things that couldn't be truncated directly,
821 and for which there was no special instruction. */
822 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
824 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
825 emit_move_insn (to
, temp
);
829 /* Mode combination is not recognized. */
833 /* Return an rtx for a value that would result
834 from converting X to mode MODE.
835 Both X and MODE may be floating, or both integer.
836 UNSIGNEDP is nonzero if X is an unsigned value.
837 This can be done by referring to a part of X in place
838 or by copying to a new temporary with conversion.
840 This function *must not* call protect_from_queue
841 except when putting X into an insn (in which case convert_move does it). */
844 convert_to_mode (mode
, x
, unsignedp
)
845 enum machine_mode mode
;
851 if (mode
== GET_MODE (x
))
854 /* There is one case that we must handle specially: If we are converting
855 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
856 we are to interpret the constant as unsigned, gen_lowpart will do
857 the wrong if the constant appears negative. What we want to do is
858 make the high-order word of the constant zero, not all ones. */
860 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
861 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
862 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
863 return immed_double_const (INTVAL (x
), (HOST_WIDE_INT
) 0, mode
);
865 /* We can do this with a gen_lowpart if both desired and current modes
866 are integer, and this is either a constant integer, a register, or a
867 non-volatile MEM. Except for the constant case, we must be narrowing
870 if (GET_CODE (x
) == CONST_INT
871 || (GET_MODE_CLASS (mode
) == MODE_INT
872 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
873 && (GET_CODE (x
) == CONST_DOUBLE
874 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (GET_MODE (x
))
875 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
))
876 && direct_load
[(int) mode
]
877 || GET_CODE (x
) == REG
)))))
878 return gen_lowpart (mode
, x
);
880 temp
= gen_reg_rtx (mode
);
881 convert_move (temp
, x
, unsignedp
);
885 /* Generate several move instructions to copy LEN bytes
886 from block FROM to block TO. (These are MEM rtx's with BLKmode).
887 The caller must pass FROM and TO
888 through protect_from_queue before calling.
889 ALIGN (in bytes) is maximum alignment we can assume. */
891 struct move_by_pieces
900 int explicit_inc_from
;
/* Forward declarations for the move_by_pieces helpers defined below
   (pre-ANSI style, no prototypes).  */
906 static void move_by_pieces_1 ();
907 static int move_by_pieces_ninsns ();
910 move_by_pieces (to
, from
, len
, align
)
914 struct move_by_pieces data
;
915 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
916 int max_size
= MOVE_MAX
+ 1;
919 data
.to_addr
= to_addr
;
920 data
.from_addr
= from_addr
;
924 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
925 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
927 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
928 || GET_CODE (from_addr
) == POST_INC
929 || GET_CODE (from_addr
) == POST_DEC
);
931 data
.explicit_inc_from
= 0;
932 data
.explicit_inc_to
= 0;
934 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
935 if (data
.reverse
) data
.offset
= len
;
938 /* If copying requires more than two move insns,
939 copy addresses to registers (to make displacements shorter)
940 and use post-increment if available. */
941 if (!(data
.autinc_from
&& data
.autinc_to
)
942 && move_by_pieces_ninsns (len
, align
) > 2)
944 #ifdef HAVE_PRE_DECREMENT
945 if (data
.reverse
&& ! data
.autinc_from
)
947 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
948 data
.autinc_from
= 1;
949 data
.explicit_inc_from
= -1;
952 #ifdef HAVE_POST_INCREMENT
953 if (! data
.autinc_from
)
955 data
.from_addr
= copy_addr_to_reg (from_addr
);
956 data
.autinc_from
= 1;
957 data
.explicit_inc_from
= 1;
960 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
961 data
.from_addr
= copy_addr_to_reg (from_addr
);
962 #ifdef HAVE_PRE_DECREMENT
963 if (data
.reverse
&& ! data
.autinc_to
)
965 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
967 data
.explicit_inc_to
= -1;
970 #ifdef HAVE_POST_INCREMENT
971 if (! data
.reverse
&& ! data
.autinc_to
)
973 data
.to_addr
= copy_addr_to_reg (to_addr
);
975 data
.explicit_inc_to
= 1;
978 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
979 data
.to_addr
= copy_addr_to_reg (to_addr
);
982 if (! (STRICT_ALIGNMENT
|| SLOW_UNALIGNED_ACCESS
)
983 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
986 /* First move what we can in the largest integer mode, then go to
987 successively smaller modes. */
991 enum machine_mode mode
= VOIDmode
, tmode
;
992 enum insn_code icode
;
994 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
995 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
996 if (GET_MODE_SIZE (tmode
) < max_size
)
999 if (mode
== VOIDmode
)
1002 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1003 if (icode
!= CODE_FOR_nothing
1004 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1005 GET_MODE_SIZE (mode
)))
1006 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1008 max_size
= GET_MODE_SIZE (mode
);
1011 /* The code above should have handled everything. */
1016 /* Return number of insns required to move L bytes by pieces.
1017 ALIGN (in bytes) is maximum alignment we can assume. */
1020 move_by_pieces_ninsns (l
, align
)
1024 register int n_insns
= 0;
1025 int max_size
= MOVE_MAX
+ 1;
1027 if (! (STRICT_ALIGNMENT
|| SLOW_UNALIGNED_ACCESS
)
1028 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1031 while (max_size
> 1)
1033 enum machine_mode mode
= VOIDmode
, tmode
;
1034 enum insn_code icode
;
1036 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1037 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1038 if (GET_MODE_SIZE (tmode
) < max_size
)
1041 if (mode
== VOIDmode
)
1044 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1045 if (icode
!= CODE_FOR_nothing
1046 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1047 GET_MODE_SIZE (mode
)))
1048 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1050 max_size
= GET_MODE_SIZE (mode
);
1056 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1057 with move instructions for mode MODE. GENFUN is the gen_... function
1058 to make a move insn for that mode. DATA has all the other info. */
1061 move_by_pieces_1 (genfun
, mode
, data
)
1063 enum machine_mode mode
;
1064 struct move_by_pieces
*data
;
1066 register int size
= GET_MODE_SIZE (mode
);
1067 register rtx to1
, from1
;
1069 while (data
->len
>= size
)
1071 if (data
->reverse
) data
->offset
-= size
;
1073 to1
= (data
->autinc_to
1074 ? gen_rtx (MEM
, mode
, data
->to_addr
)
1075 : change_address (data
->to
, mode
,
1076 plus_constant (data
->to_addr
, data
->offset
)));
1079 ? gen_rtx (MEM
, mode
, data
->from_addr
)
1080 : change_address (data
->from
, mode
,
1081 plus_constant (data
->from_addr
, data
->offset
)));
1083 #ifdef HAVE_PRE_DECREMENT
1084 if (data
->explicit_inc_to
< 0)
1085 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1086 if (data
->explicit_inc_from
< 0)
1087 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1090 emit_insn ((*genfun
) (to1
, from1
));
1091 #ifdef HAVE_POST_INCREMENT
1092 if (data
->explicit_inc_to
> 0)
1093 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1094 if (data
->explicit_inc_from
> 0)
1095 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1098 if (! data
->reverse
) data
->offset
+= size
;
1104 /* Emit code to move a block Y to a block X.
1105 This may be done with string-move instructions,
1106 with multiple scalar move instructions, or with a library call.
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1110 SIZE is an rtx that says how long they are.
1111 ALIGN is the maximum alignment we can assume they have,
1112 measured in bytes. */
1115 emit_block_move (x
, y
, size
, align
)
1120 if (GET_MODE (x
) != BLKmode
)
1123 if (GET_MODE (y
) != BLKmode
)
1126 x
= protect_from_queue (x
, 1);
1127 y
= protect_from_queue (y
, 0);
1128 size
= protect_from_queue (size
, 0);
1130 if (GET_CODE (x
) != MEM
)
1132 if (GET_CODE (y
) != MEM
)
1137 if (GET_CODE (size
) == CONST_INT
1138 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
1139 move_by_pieces (x
, y
, INTVAL (size
), align
);
1142 /* Try the most limited insn first, because there's no point
1143 including more than one in the machine description unless
1144 the more limited one has some advantage. */
1145 #ifdef HAVE_movstrqi
1147 && GET_CODE (size
) == CONST_INT
1148 && ((unsigned) INTVAL (size
)
1149 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
1151 rtx insn
= gen_movstrqi (x
, y
, size
, GEN_INT (align
));
1159 #ifdef HAVE_movstrhi
1161 && GET_CODE (size
) == CONST_INT
1162 && ((unsigned) INTVAL (size
)
1163 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
1165 rtx insn
= gen_movstrhi (x
, y
, size
, GEN_INT (align
));
1173 #ifdef HAVE_movstrsi
1176 rtx insn
= gen_movstrsi (x
, y
, size
, GEN_INT (align
));
1184 #ifdef HAVE_movstrdi
1187 rtx insn
= gen_movstrdi (x
, y
, size
, GEN_INT (align
));
1196 #ifdef TARGET_MEM_FUNCTIONS
1197 emit_library_call (memcpy_libfunc
, 1,
1198 VOIDmode
, 3, XEXP (x
, 0), Pmode
,
1200 convert_to_mode (Pmode
, size
, 1), Pmode
);
1202 emit_library_call (bcopy_libfunc
, 1,
1203 VOIDmode
, 3, XEXP (y
, 0), Pmode
,
1205 convert_to_mode (Pmode
, size
, 1), Pmode
);
1210 /* Copy all or part of a value X into registers starting at REGNO.
1211 The number of registers to be filled is NREGS. */
1214 move_block_to_reg (regno
, x
, nregs
, mode
)
1218 enum machine_mode mode
;
1223 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1224 x
= validize_mem (force_const_mem (mode
, x
));
1226 /* See if the machine can do this with a load multiple insn. */
1227 #ifdef HAVE_load_multiple
1228 last
= get_last_insn ();
1229 pat
= gen_load_multiple (gen_rtx (REG
, word_mode
, regno
), x
,
1237 delete_insns_since (last
);
1240 for (i
= 0; i
< nregs
; i
++)
1241 emit_move_insn (gen_rtx (REG
, word_mode
, regno
+ i
),
1242 operand_subword_force (x
, i
, mode
));
1245 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1246 The number of registers to be filled is NREGS. */
1249 move_block_from_reg (regno
, x
, nregs
)
1257 /* See if the machine can do this with a store multiple insn. */
1258 #ifdef HAVE_store_multiple
1259 last
= get_last_insn ();
1260 pat
= gen_store_multiple (x
, gen_rtx (REG
, word_mode
, regno
),
1268 delete_insns_since (last
);
1271 for (i
= 0; i
< nregs
; i
++)
1273 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1278 emit_move_insn (tem
, gen_rtx (REG
, word_mode
, regno
+ i
));
1282 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
/* NOTE(review): this extraction is missing original lines 1286-1290
   (the parameter declarations, opening brace, and loop-variable
   declaration), so the function body below is incomplete as shown.  */
1285 use_regs (regno
, nregs
)
/* Emit a (USE (REG)) for each of the NREGS hard registers so that data
   flow analysis considers them live from this point.  */
1291 for (i
= 0; i
< nregs
; i
++)
1292 emit_insn (gen_rtx (USE
, VOIDmode
, gen_rtx (REG
, word_mode
, regno
+ i
)));
1295 /* Write zeros through the storage of OBJECT.
1296 If OBJECT has BLKmode, SIZE is its length in bytes. */
/* NOTE(review): original lines 1300-1302 (parameter declarations and
   opening brace), 1310 (the #else pairing with the #ifdef below) and
   1315-1317 (#endif and the else of the BLKmode test) are missing from
   this extraction — the visible body is incomplete.  */
1299 clear_storage (object
, size
)
/* BLKmode objects are cleared with a library call ... */
1303 if (GET_MODE (object
") == BLKmode"
)
/* ... memset (addr, 0, size) when the target supplies the ANSI memory
   functions ... */
1305 #ifdef TARGET_MEM_FUNCTIONS
1306 emit_library_call (memset_libfunc
, 1,
1308 XEXP (object
, 0), Pmode
, const0_rtx
, Pmode
,
1309 GEN_INT (size
), Pmode
);
/* ... otherwise bzero (addr, size) (this arm presumably sits under the
   missing #else — confirm against the upstream file).  */
1311 emit_library_call (bzero_libfunc
, 1,
1313 XEXP (object
, 0), Pmode
,
1314 GEN_INT (size
), Pmode
);
/* Non-BLKmode objects are simply moved a zero.  */
1318 emit_move_insn (object
, const0_rtx
);
/* NOTE(review): this chunk is an extraction-mangled copy of a GNU CC
   (GCC) expr.c; the leading numbers on lines are original-file line
   numbers, statements are split mid-expression, and some original lines
   (braces, declarations, abort() bodies) are missing from this view.
   Only comments have been added below; all original text is untouched.  */
1321 /* Generate code to copy Y into X.
1322 Both Y and X must have the same mode, except that
1323 Y can be a constant with VOIDmode.
1324 This mode cannot be BLKmode; use emit_block_move for that.
1326 Return the last instruction emitted. */
/* emit_move_insn: X is the destination rtx, Y the source rtx (per the
   header comment above).  Returns the last insn emitted.  */
1329 emit_move_insn (x
, y
)
1332 enum machine_mode mode
= GET_MODE (x
);
/* Flush any queued side effects on the operands before moving.  */
1335 x
= protect_from_queue (x
, 1);
1336 y
= protect_from_queue (y
, 0);
/* NOTE(review): the body of this `if' (presumably an abort) is one of
   the lines missing from this extraction -- verify against upstream.  */
1338 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
/* A constant that the target cannot use directly is forced to memory.  */
1341 if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
1342 y
= force_const_mem (mode
, y
);
1344 /* If X or Y are memory references, verify that their addresses are valid
1346 if (GET_CODE (x
) == MEM
1347 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
1348 && ! push_operand (x
, GET_MODE (x
)))
1350 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
1351 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
1353 if (GET_CODE (y
) == MEM
1354 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
1356 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
1357 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
1359 if (mode
== BLKmode
)
/* Use the machine-description move pattern for this mode if one exists.  */
1362 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
1364 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
1366 /* This will handle any multi-word mode that lacks a move_insn pattern.
1367 However, you will get better code if you define such patterns,
1368 even if they must turn into multiple assembler instructions. */
1369 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
/* Move the value one word at a time; the loop bound rounds the mode
   size up to whole words.  */
1374 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
1377 rtx xpart
= operand_subword (x
, i
, 1, mode
);
1378 rtx ypart
= operand_subword (y
, i
, 1, mode
);
1380 /* If we can't get a part of Y, put Y into memory if it is a
1381 constant. Otherwise, force it into a register. If we still
1382 can't get a part of Y, abort. */
1383 if (ypart
== 0 && CONSTANT_P (y
))
1385 y
= force_const_mem (mode
, y
);
1386 ypart
= operand_subword (y
, i
, 1, mode
);
1388 else if (ypart
== 0)
1389 ypart
= operand_subword_force (y
, i
, mode
);
1391 if (xpart
== 0 || ypart
== 0)
/* Recurse word-by-word; the last insn of the final word move is the
   function's result (the trailing return is missing from this view).  */
1394 last_insn
= emit_move_insn (xpart
, ypart
);
1402 /* Pushing data onto the stack. */
1404 /* Push a block of length SIZE (perhaps variable)
1405 and return an rtx to address the beginning of the block.
1406 Note that it is not possible for the value returned to be a QUEUED.
1407 The value may be virtual_outgoing_args_rtx.
1409 EXTRA is the number of bytes of padding to push in addition to SIZE.
1410 BELOW nonzero means this padding comes at low addresses;
1411 otherwise, the padding comes at high addresses. */
/* push_block: SIZE is an rtx (possibly non-constant); EXTRA and BELOW
   are ints, per the header comment above.  */
1414 push_block (size
, extra
, below
)
/* Adjust the stack pointer down (anti-adjust) by SIZE + EXTRA.  */
1419 if (CONSTANT_P (size
))
1420 anti_adjust_stack (plus_constant (size
, extra
));
1421 else if (GET_CODE (size
) == REG
&& extra
== 0)
1422 anti_adjust_stack (size
);
/* General case: compute SIZE + EXTRA into a register first.  */
1425 rtx temp
= copy_to_mode_reg (Pmode
, size
);
1427 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
1428 temp
, 0, OPTAB_LIB_WIDEN
);
1429 anti_adjust_stack (temp
);
1432 #ifdef STACK_GROWS_DOWNWARD
1433 temp
= virtual_outgoing_args_rtx
;
1434 if (extra
!= 0 && below
)
1435 temp
= plus_constant (temp
, extra
);
/* Stack grows upward: the block starts SIZE (+ padding) below the
   outgoing-args pointer.  */
1437 if (GET_CODE (size
) == CONST_INT
)
1438 temp
= plus_constant (virtual_outgoing_args_rtx
,
1439 - INTVAL (size
) - (below
? 0 : extra
));
1440 else if (extra
!= 0 && !below
)
1441 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
1442 negate_rtx (Pmode
, plus_constant (size
, extra
)));
1444 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
1445 negate_rtx (Pmode
, size
));
1448 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
/* NOTE(review): the `return' below is unreachable from push_block's
   return above; it appears to belong to a second function whose header
   lines are missing from this extraction -- presumably gen_push_operand,
   which is called elsewhere in this file.  Verify against upstream.  */
1454 return gen_rtx (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
1457 /* Generate code to push X onto the stack, assuming it has mode MODE and
1459 MODE is redundant except when X is a CONST_INT (since they don't
1461 SIZE is an rtx for the size of data to be copied (in bytes),
1462 needed only if X is BLKmode.
1464 ALIGN (in bytes) is maximum alignment we can assume.
1466 If PARTIAL is nonzero, then copy that many of the first words
1467 of X into registers starting with REG, and push the rest of X.
1468 The amount of space pushed is decreased by PARTIAL words,
1469 rounded *down* to a multiple of PARM_BOUNDARY.
1470 REG must be a hard register in this case.
1472 EXTRA is the amount in bytes of extra space to leave next to this arg.
1473 This is ignored if an argument block has already been allocated.
1475 On a machine that lacks real push insns, ARGS_ADDR is the address of
1476 the bottom of the argument block for this call. We use indexing off there
1477 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1478 argument block has not been preallocated.
1480 ARGS_SO_FAR is the size of args previously pushed for this call. */
/* NOTE(review): several declaration lines and #else/#endif lines of this
   function are missing from this extraction -- compare with upstream
   before editing.  Three main cases below: BLKmode block push, scalar
   partly in registers, and plain scalar push.  */
1483 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
1484 args_addr
, args_so_far
)
1486 enum machine_mode mode
;
1497 enum direction stack_direction
1498 #ifdef STACK_GROWS_DOWNWARD
1504 /* Decide where to pad the argument: `downward' for below,
1505 `upward' for above, or `none' for don't pad it.
1506 Default is below for small data on big-endian machines; else above. */
1507 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
1509 /* Invert direction if stack is post-update. */
1510 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
1511 if (where_pad
!= none
)
1512 where_pad
= (where_pad
== downward
? upward
: downward
);
1514 xinner
= x
= protect_from_queue (x
, 0);
1516 if (mode
== BLKmode
)
1518 /* Copy a block into the stack, entirely or partially. */
1521 int used
= partial
* UNITS_PER_WORD
;
1522 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
1530 /* USED is now the # of bytes we need not copy to the stack
1531 because registers will take care of them. */
1534 xinner
= change_address (xinner
, BLKmode
,
1535 plus_constant (XEXP (xinner
, 0), used
));
1537 /* If the partial register-part of the arg counts in its stack size,
1538 skip the part of stack space corresponding to the registers.
1539 Otherwise, start copying to the beginning of the stack space,
1540 by setting SKIP to 0. */
1541 #ifndef REG_PARM_STACK_SPACE
1547 #ifdef PUSH_ROUNDING
1548 /* Do it with several push insns if that doesn't take lots of insns
1549 and if there is no difficulty with push insns that skip bytes
1550 on the stack for alignment purposes. */
1552 && GET_CODE (size
) == CONST_INT
1554 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
) - used
, align
)
1556 /* Here we avoid the case of a structure whose weak alignment
1557 forces many pushes of a small amount of data,
1558 and such small pushes do rounding that causes trouble. */
1559 && ((! STRICT_ALIGNMENT
&& ! SLOW_UNALIGNED_ACCESS
)
1560 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
1561 || PUSH_ROUNDING (align
) == align
)
1562 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
1564 /* Push padding now if padding above and stack grows down,
1565 or if padding below and stack grows up.
1566 But if space already allocated, this has already been done. */
1567 if (extra
&& args_addr
== 0
1568 && where_pad
!= none
&& where_pad
!= stack_direction
)
1569 anti_adjust_stack (GEN_INT (extra
));
1571 move_by_pieces (gen_rtx (MEM
, BLKmode
, gen_push_operand ()), xinner
,
1572 INTVAL (size
) - used
, align
);
1575 #endif /* PUSH_ROUNDING */
1577 /* Otherwise make space on the stack and copy the data
1578 to the address of that space. */
1580 /* Deduct words put into registers from the size we must copy. */
1583 if (GET_CODE (size
) == CONST_INT
)
1584 size
= GEN_INT (INTVAL (size
) - used
);
1586 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
1587 GEN_INT (used
), NULL_RTX
, 0,
1591 /* Get the address of the stack space.
1592 In this case, we do not deal with EXTRA separately.
1593 A single stack adjust will do. */
1596 temp
= push_block (size
, extra
, where_pad
== downward
);
1599 else if (GET_CODE (args_so_far
) == CONST_INT
)
1600 temp
= memory_address (BLKmode
,
1601 plus_constant (args_addr
,
1602 skip
+ INTVAL (args_so_far
)));
1604 temp
= memory_address (BLKmode
,
1605 plus_constant (gen_rtx (PLUS
, Pmode
,
1606 args_addr
, args_so_far
),
1609 /* TEMP is the address of the block. Copy the data there. */
1610 if (GET_CODE (size
) == CONST_INT
1611 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
), align
)
1614 move_by_pieces (gen_rtx (MEM
, BLKmode
, temp
), xinner
,
1615 INTVAL (size
), align
);
1618 /* Try the most limited insn first, because there's no point
1619 including more than one in the machine description unless
1620 the more limited one has some advantage. */
1621 #ifdef HAVE_movstrqi
1623 && GET_CODE (size
) == CONST_INT
1624 && ((unsigned) INTVAL (size
)
1625 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
1627 emit_insn (gen_movstrqi (gen_rtx (MEM
, BLKmode
, temp
),
1628 xinner
, size
, GEN_INT (align
)));
1632 #ifdef HAVE_movstrhi
1634 && GET_CODE (size
) == CONST_INT
1635 && ((unsigned) INTVAL (size
)
1636 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
1638 emit_insn (gen_movstrhi (gen_rtx (MEM
, BLKmode
, temp
),
1639 xinner
, size
, GEN_INT (align
)));
1643 #ifdef HAVE_movstrsi
1646 emit_insn (gen_movstrsi (gen_rtx (MEM
, BLKmode
, temp
),
1647 xinner
, size
, GEN_INT (align
)));
1651 #ifdef HAVE_movstrdi
1654 emit_insn (gen_movstrdi (gen_rtx (MEM
, BLKmode
, temp
),
1655 xinner
, size
, GEN_INT (align
)));
1660 #ifndef ACCUMULATE_OUTGOING_ARGS
1661 /* If the source is referenced relative to the stack pointer,
1662 copy it to another register to stabilize it. We do not need
1663 to do this if we know that we won't be changing sp. */
1665 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
1666 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
1667 temp
= copy_to_reg (temp
);
1670 /* Make inhibit_defer_pop nonzero around the library call
1671 to force it to pop the bcopy-arguments right away. */
1673 #ifdef TARGET_MEM_FUNCTIONS
1674 emit_library_call (memcpy_libfunc
, 1,
1675 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
1678 emit_library_call (bcopy_libfunc
, 1,
1679 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
1685 else if (partial
> 0)
1687 /* Scalar partly in registers. */
1689 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
1692 /* # words of start of argument
1693 that we must make space for but need not store. */
1694 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
1695 int args_offset
= INTVAL (args_so_far
);
1698 /* Push padding now if padding above and stack grows down,
1699 or if padding below and stack grows up.
1700 But if space already allocated, this has already been done. */
1701 if (extra
&& args_addr
== 0
1702 && where_pad
!= none
&& where_pad
!= stack_direction
)
1703 anti_adjust_stack (GEN_INT (extra
));
1705 /* If we make space by pushing it, we might as well push
1706 the real data. Otherwise, we can leave OFFSET nonzero
1707 and leave the space uninitialized. */
1711 /* Now NOT_STACK gets the number of words that we don't need to
1712 allocate on the stack. */
1713 not_stack
= partial
- offset
;
1715 /* If the partial register-part of the arg counts in its stack size,
1716 skip the part of stack space corresponding to the registers.
1717 Otherwise, start copying to the beginning of the stack space,
1718 by setting SKIP to 0. */
1719 #ifndef REG_PARM_STACK_SPACE
1725 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1726 x
= validize_mem (force_const_mem (mode
, x
));
1728 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1729 SUBREGs of such registers are not allowed. */
1730 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
1731 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
1732 x
= copy_to_reg (x
);
1734 /* Loop over all the words allocated on the stack for this arg. */
1735 /* We can do it by words, because any scalar bigger than a word
1736 has a size a multiple of a word. */
1737 #ifndef PUSH_ARGS_REVERSED
1738 for (i
= not_stack
; i
< size
; i
++)
1740 for (i
= size
- 1; i
>= not_stack
; i
--)
1742 if (i
>= not_stack
+ offset
)
/* Recursive word push: each word of X becomes its own push insn.  */
1743 emit_push_insn (operand_subword_force (x
, i
, mode
),
1744 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
1746 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
1747 * UNITS_PER_WORD
)));
1753 /* Push padding now if padding above and stack grows down,
1754 or if padding below and stack grows up.
1755 But if space already allocated, this has already been done. */
1756 if (extra
&& args_addr
== 0
1757 && where_pad
!= none
&& where_pad
!= stack_direction
)
1758 anti_adjust_stack (GEN_INT (extra
));
1760 #ifdef PUSH_ROUNDING
1762 addr
= gen_push_operand ();
1765 if (GET_CODE (args_so_far
) == CONST_INT
)
1767 = memory_address (mode
,
1768 plus_constant (args_addr
, INTVAL (args_so_far
)));
1770 addr
= memory_address (mode
, gen_rtx (PLUS
, Pmode
, args_addr
,
1773 emit_move_insn (gen_rtx (MEM
, mode
, addr
), x
);
1777 /* If part should go in registers, copy that part
1778 into the appropriate registers. Do this now, at the end,
1779 since mem-to-mem copies above may do function calls. */
1781 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
/* Padding on the same side as the stack growth is pushed last.  */
1783 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
1784 anti_adjust_stack (GEN_INT (extra
));
1787 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1788 (emitting the queue unless NO_QUEUE is nonzero),
1789 for a value of mode OUTMODE,
1790 with NARGS different arguments, passed as alternating rtx values
1791 and machine_modes to convert them to.
1792 The rtx values should have been passed through protect_from_queue already.
1794 NO_QUEUE will be true if and only if the library call is a `const' call
1795 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1796 to the variable is_const in expand_call. */
/* emit_library_call: old-style varargs (va_alist); reads FUN, NO_QUEUE,
   OUTMODE, NARGS and then NARGS (value, mode) pairs via va_arg below.
   NOTE(review): several declarations and #else/#endif lines are missing
   from this extraction.  */
1799 emit_library_call (va_alist
)
1803 struct args_size args_size
;
1804 register int argnum
;
1805 enum machine_mode outmode
;
1812 CUMULATIVE_ARGS args_so_far
;
1813 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
1814 struct args_size offset
; struct args_size size
; };
1816 int old_inhibit_defer_pop
= inhibit_defer_pop
;
1821 orgfun
= fun
= va_arg (p
, rtx
);
1822 no_queue
= va_arg (p
, int);
1823 outmode
= va_arg (p
, enum machine_mode
);
1824 nargs
= va_arg (p
, int);
1826 /* Copy all the libcall-arguments out of the varargs data
1827 and into a vector ARGVEC.
1829 Compute how to pass each argument. We only support a very small subset
1830 of the full argument passing conventions to limit complexity here since
1831 library functions shouldn't have many args. */
1833 argvec
= (struct arg
*) alloca (nargs
* sizeof (struct arg
));
1835 INIT_CUMULATIVE_ARGS (args_so_far
, (tree
)0, fun
);
1837 args_size
.constant
= 0;
/* First pass: decode each (value, mode) pair and decide how it is
   passed (register, stack, or split).  */
1840 for (count
= 0; count
< nargs
; count
++)
1842 rtx val
= va_arg (p
, rtx
);
1843 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
1845 /* We cannot convert the arg value to the mode the library wants here;
1846 must do it earlier where we know the signedness of the arg. */
1848 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
1851 /* On some machines, there's no way to pass a float to a library fcn.
1852 Pass it as a double instead. */
1853 #ifdef LIBGCC_NEEDS_DOUBLE
1854 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
1855 val
= convert_to_mode (DFmode
, val
), mode
= DFmode
;
1858 /* There's no need to call protect_from_queue, because
1859 either emit_move_insn or emit_push_insn will do that. */
1861 /* Make sure it is a reasonable operand for a move or push insn. */
1862 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
1863 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
1864 val
= force_operand (val
, NULL_RTX
);
1866 argvec
[count
].value
= val
;
1867 argvec
[count
].mode
= mode
;
1869 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1870 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
1874 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1875 if (argvec
[count
].reg
&& GET_CODE (argvec
[count
].reg
) == EXPR_LIST
)
1877 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1878 argvec
[count
].partial
1879 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
1881 argvec
[count
].partial
= 0;
1884 locate_and_pad_parm (mode
, NULL_TREE
,
1885 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
1886 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
1887 &argvec
[count
].size
);
1889 if (argvec
[count
].size
.var
)
1892 #ifndef REG_PARM_STACK_SPACE
1893 if (argvec
[count
].partial
)
1894 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
1897 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
1898 #ifdef REG_PARM_STACK_SPACE
1902 args_size
.constant
+= argvec
[count
].size
.constant
;
1904 #ifdef ACCUMULATE_OUTGOING_ARGS
1905 /* If this arg is actually passed on the stack, it might be
1906 clobbering something we already put there (this library call might
1907 be inside the evaluation of an argument to a function whose call
1908 requires the stack). This will only occur when the library call
1909 has sufficient args to run out of argument registers. Abort in
1910 this case; if this ever occurs, code must be added to save and
1911 restore the arg slot. */
1913 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0)
1917 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
)0, 1);
1921 /* If this machine requires an external definition for library
1922 functions, write one out. */
1923 assemble_external_libcall (fun
);
1925 #ifdef STACK_BOUNDARY
/* Round the total stack argument size up to a multiple of STACK_BYTES.  */
1926 args_size
.constant
= (((args_size
.constant
+ (STACK_BYTES
- 1))
1927 / STACK_BYTES
) * STACK_BYTES
);
1930 #ifdef REG_PARM_STACK_SPACE
1931 args_size
.constant
= MAX (args_size
.constant
,
1932 REG_PARM_STACK_SPACE ((tree
) 0));
1935 #ifdef ACCUMULATE_OUTGOING_ARGS
1936 if (args_size
.constant
> current_function_outgoing_args_size
)
1937 current_function_outgoing_args_size
= args_size
.constant
;
1938 args_size
.constant
= 0;
1941 #ifndef PUSH_ROUNDING
1942 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
1945 #ifdef PUSH_ARGS_REVERSED
1953 /* Push the args that need to be pushed. */
1955 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
1957 register enum machine_mode mode
= argvec
[argnum
].mode
;
1958 register rtx val
= argvec
[argnum
].value
;
1959 rtx reg
= argvec
[argnum
].reg
;
1960 int partial
= argvec
[argnum
].partial
;
1962 if (! (reg
!= 0 && partial
== 0))
/* NOTE(review): `argvec[count]' here while every other field in this
   loop is indexed by `argnum' looks suspicious -- when the loop runs
   reversed (PUSH_ARGS_REVERSED) count and argnum differ.  Verify
   against upstream history before changing.  */
1963 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, 0, partial
, reg
, 0,
1964 argblock
, GEN_INT (argvec
[count
].offset
.constant
));
1968 #ifdef PUSH_ARGS_REVERSED
1974 /* Now load any reg parms into their regs. */
1976 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
1978 register enum machine_mode mode
= argvec
[argnum
].mode
;
1979 register rtx val
= argvec
[argnum
].value
;
1980 rtx reg
= argvec
[argnum
].reg
;
1981 int partial
= argvec
[argnum
].partial
;
1983 if (reg
!= 0 && partial
== 0)
1984 emit_move_insn (reg
, val
);
1988 /* For version 1.37, try deleting this entirely. */
1992 /* Any regs containing parms remain in use through the call. */
1994 for (count
= 0; count
< nargs
; count
++)
1995 if (argvec
[count
].reg
!= 0)
1996 emit_insn (gen_rtx (USE
, VOIDmode
, argvec
[count
].reg
));
1998 use_insns
= get_insns ();
2001 fun
= prepare_call_address (fun
, NULL_TREE
, &use_insns
);
2003 /* Don't allow popping to be deferred, since then
2004 cse'ing of library calls could delete a call and leave the pop. */
2007 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2008 will set inhibit_defer_pop to that value. */
2010 emit_call_1 (fun
, get_identifier (XSTR (orgfun
, 0)), args_size
.constant
, 0,
2011 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
2012 outmode
!= VOIDmode
? hard_libcall_value (outmode
) : NULL_RTX
,
2013 old_inhibit_defer_pop
+ 1, use_insns
, no_queue
);
2015 /* Now restore inhibit_defer_pop to its actual original value. */
2019 /* Expand an assignment that stores the value of FROM into TO.
2020 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2021 (This may contain a QUEUED rtx.)
2022 Otherwise, the returned value is not meaningful.
2024 SUGGEST_REG is no longer actually used.
2025 It used to mean, copy the value through a register
2026 and return that register, if that is possible.
2027 But now we do this if WANT_VALUE.
2029 If the value stored is a constant, we return the constant. */
/* expand_assignment: TO and FROM are trees; WANT_VALUE and SUGGEST_REG
   are ints (SUGGEST_REG unused, per comment above).
   NOTE(review): extraction-mangled; some declarations and return
   statements are missing from this view.  */
2032 expand_assignment (to
, from
, want_value
, suggest_reg
)
2037 register rtx to_rtx
= 0;
2040 /* Don't crash if the lhs of the assignment was erroneous. */
2042 if (TREE_CODE (to
) == ERROR_MARK
)
2043 return expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2045 /* Assignment of a structure component needs special treatment
2046 if the structure component's rtx is not simply a MEM.
2047 Assignment of an array element at a constant index
2048 has the same problem. */
2050 if (TREE_CODE (to
) == COMPONENT_REF
2051 || TREE_CODE (to
) == BIT_FIELD_REF
2052 || (TREE_CODE (to
) == ARRAY_REF
2053 && TREE_CODE (TREE_OPERAND (to
, 1)) == INTEGER_CST
2054 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to
))) == INTEGER_CST
))
2056 enum machine_mode mode1
;
/* Decompose the reference into the containing object plus bit
   position/size, via get_inner_reference.  */
2062 tree tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
,
2063 &mode1
, &unsignedp
, &volatilep
);
2065 /* If we are going to use store_bit_field and extract_bit_field,
2066 make sure to_rtx will be safe for multiple use. */
2068 if (mode1
== VOIDmode
&& want_value
)
2069 tem
= stabilize_reference (tem
);
2071 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
/* A variable offset is added to the address at run time.  */
2074 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
2076 if (GET_CODE (to_rtx
) != MEM
)
2078 to_rtx
= change_address (to_rtx
, VOIDmode
,
2079 gen_rtx (PLUS
, Pmode
, XEXP (to_rtx
, 0),
2080 force_reg (Pmode
, offset_rtx
)));
2084 if (GET_CODE (to_rtx
) == MEM
)
2085 MEM_VOLATILE_P (to_rtx
) = 1;
2086 #if 0 /* This was turned off because, when a field is volatile
2087 in an object which is not volatile, the object may be in a register,
2088 and then we would abort over here. */
2094 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
2096 /* Spurious cast makes HPUX compiler happy. */
2097 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
2100 /* Required alignment of containing datum. */
2101 TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
,
2102 int_size_in_bytes (TREE_TYPE (tem
)));
2103 preserve_temp_slots (result
);
2109 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2110 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2113 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2115 /* In case we are returning the contents of an object which overlaps
2116 the place the value is being stored, use a safe function when copying
2117 a value through a pointer into a structure value return block. */
2118 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
2119 && current_function_returns_struct
2120 && !current_function_returns_pcc_struct
)
2122 rtx from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2123 rtx size
= expr_size (from
);
2125 #ifdef TARGET_MEM_FUNCTIONS
/* Note argument-order difference: memcpy (dst, src), bcopy (src, dst).  */
2126 emit_library_call (memcpy_libfunc
, 1,
2127 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
2128 XEXP (from_rtx
, 0), Pmode
,
2131 emit_library_call (bcopy_libfunc
, 1,
2132 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
2133 XEXP (to_rtx
, 0), Pmode
,
2137 preserve_temp_slots (to_rtx
);
2142 /* Compute FROM and store the value in the rtx we got. */
2144 result
= store_expr (from
, to_rtx
, want_value
);
2145 preserve_temp_slots (result
);
2150 /* Generate code for computing expression EXP,
2151 and storing the value into TARGET.
2152 Returns TARGET or an equivalent value.
2153 TARGET may contain a QUEUED rtx.
2155 If SUGGEST_REG is nonzero, copy the value through a register
2156 and return that register, if that is possible.
2158 If the value stored is a constant, we return the constant. */
/* store_expr: EXP is a tree, TARGET an rtx, SUGGEST_REG an int.
   NOTE(review): extraction-mangled; braces and some statements
   (e.g. labels, final returns) are missing from this view.  */
2161 store_expr (exp
, target
, suggest_reg
)
2163 register rtx target
;
2167 int dont_return_target
= 0;
2169 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
2171 /* Perform first part of compound expression, then assign from second
2173 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
2175 return store_expr (TREE_OPERAND (exp
, 1), target
, suggest_reg
);
2177 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
2179 /* For conditional expression, get safe form of the target. Then
2180 test the condition, doing the appropriate assignment on either
2181 side. This avoids the creation of unnecessary temporaries.
2182 For non-BLKmode, it is more efficient not to do this. */
2184 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
2187 target
= protect_from_queue (target
, 1);
2190 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
2191 store_expr (TREE_OPERAND (exp
, 1), target
, suggest_reg
);
2193 emit_jump_insn (gen_jump (lab2
));
2196 store_expr (TREE_OPERAND (exp
, 2), target
, suggest_reg
);
2202 else if (suggest_reg
&& GET_CODE (target
) == MEM
2203 && GET_MODE (target
) != BLKmode
)
2204 /* If target is in memory and caller wants value in a register instead,
2205 arrange that. Pass TARGET as target for expand_expr so that,
2206 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2207 We know expand_expr will not use the target in that case. */
2209 temp
= expand_expr (exp
, cse_not_expected
? NULL_RTX
: target
,
2210 GET_MODE (target
), 0);
2211 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
2212 temp
= copy_to_reg (temp
);
2213 dont_return_target
= 1;
2215 else if (queued_subexp_p (target
))
2216 /* If target contains a postincrement, it is not safe
2217 to use as the returned value. It would access the wrong
2218 place by the time the queued increment gets output.
2219 So copy the value through a temporary and use that temp
2222 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
2224 /* Expand EXP into a new pseudo. */
2225 temp
= gen_reg_rtx (GET_MODE (target
));
2226 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
2229 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
2230 dont_return_target
= 1;
/* Common case: expand EXP directly into TARGET.  */
2234 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
2235 /* DO return TARGET if it's a specified hardware register.
2236 expand_return relies on this. */
2237 if (!(target
&& GET_CODE (target
) == REG
2238 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
2239 && CONSTANT_P (temp
))
2240 dont_return_target
= 1;
2243 /* If value was not generated in the target, store it there.
2244 Convert the value to TARGET's type first if nec. */
2246 if (temp
!= target
&& TREE_CODE (exp
) != ERROR_MARK
)
2248 target
= protect_from_queue (target
, 1);
2249 if (GET_MODE (temp
) != GET_MODE (target
)
2250 && GET_MODE (temp
) != VOIDmode
)
2252 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
2253 if (dont_return_target
)
2255 /* In this case, we will return TEMP,
2256 so make sure it has the proper mode.
2257 But don't forget to store the value into TARGET. */
2258 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
2259 emit_move_insn (target
, temp
);
2262 convert_move (target
, temp
, unsignedp
);
2265 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
2267 /* Handle copying a string constant into an array.
2268 The string constant may be shorter than the array.
2269 So copy just the string's actual length, and clear the rest. */
2272 /* Get the size of the data type of the string,
2273 which is actually the size of the target. */
2274 size
= expr_size (exp
);
2275 if (GET_CODE (size
) == CONST_INT
2276 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
2277 emit_block_move (target
, temp
, size
,
2278 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
2281 /* Compute the size of the data to copy from the string. */
/* copy_size = min (size of string type in bytes, string length).  */
2283 = fold (build (MIN_EXPR
, sizetype
,
2284 size_binop (CEIL_DIV_EXPR
,
2285 TYPE_SIZE (TREE_TYPE (exp
)),
2286 size_int (BITS_PER_UNIT
)),
2288 build_int_2 (TREE_STRING_LENGTH (exp
), 0))));
2289 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
2293 /* Copy that much. */
2294 emit_block_move (target
, temp
, copy_size_rtx
,
2295 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
2297 /* Figure out how much is left in TARGET
2298 that we have to clear. */
2299 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
2301 temp
= plus_constant (XEXP (target
, 0),
2302 TREE_STRING_LENGTH (exp
));
2303 size
= plus_constant (size
,
2304 - TREE_STRING_LENGTH (exp
));
2308 enum machine_mode size_mode
= Pmode
;
/* Variable copy size: compute the clear address and remaining size
   at run time, and skip the clearing when the remainder is negative.  */
2310 temp
= force_reg (Pmode
, XEXP (target
, 0));
2311 temp
= expand_binop (size_mode
, add_optab
, temp
,
2312 copy_size_rtx
, NULL_RTX
, 0,
2315 size
= expand_binop (size_mode
, sub_optab
, size
,
2316 copy_size_rtx
, NULL_RTX
, 0,
2319 emit_cmp_insn (size
, const0_rtx
, LT
, NULL_RTX
,
2320 GET_MODE (size
), 0, 0);
2321 label
= gen_label_rtx ();
2322 emit_jump_insn (gen_blt (label
));
2325 if (size
!= const0_rtx
)
2327 #ifdef TARGET_MEM_FUNCTIONS
2328 emit_library_call (memset_libfunc
, 1, VOIDmode
, 3,
2329 temp
, Pmode
, const0_rtx
, Pmode
, size
, Pmode
);
2331 emit_library_call (bzero_libfunc
, 1, VOIDmode
, 2,
2332 temp
, Pmode
, size
, Pmode
);
2339 else if (GET_MODE (temp
) == BLKmode
)
2340 emit_block_move (target
, temp
, expr_size (exp
),
2341 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
2343 emit_move_insn (target
, temp
);
2345 if (dont_return_target
)
2350 /* Store the value of constructor EXP into the rtx TARGET.
2351 TARGET is either a REG or a MEM. */
/* store_constructor: handles RECORD/UNION types and ARRAY types below.
   NOTE(review): extraction-mangled; braces, `continue's and some
   declarations are missing from this view.  */
2354 store_constructor (exp
, target
)
2358 tree type
= TREE_TYPE (exp
);
2360 /* We know our target cannot conflict, since safe_from_p has been called. */
2362 /* Don't try copying piece by piece into a hard register
2363 since that is vulnerable to being clobbered by EXP.
2364 Instead, construct in a pseudo register and then copy it all. */
2365 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
2367 rtx temp
= gen_reg_rtx (GET_MODE (target
));
2368 store_constructor (exp
, temp
);
2369 emit_move_insn (target
, temp
);
2374 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
)
2378 /* Inform later passes that the whole union value is dead. */
2379 if (TREE_CODE (type
) == UNION_TYPE
)
2380 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
2382 /* If we are building a static constructor into a register,
2383 set the initial value as zero so we can fold the value into
2385 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
))
2386 emit_move_insn (target
, const0_rtx
);
2388 /* If the constructor has fewer fields than the structure,
2389 clear the whole structure first. */
2390 else if (list_length (CONSTRUCTOR_ELTS (exp
))
2391 != list_length (TYPE_FIELDS (type
)))
2392 clear_storage (target
, int_size_in_bytes (type
));
2394 /* Inform later passes that the old value is dead. */
2395 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
2397 /* Store each element of the constructor into
2398 the corresponding field of TARGET. */
2400 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
2402 register tree field
= TREE_PURPOSE (elt
);
2403 register enum machine_mode mode
;
2408 /* Just ignore missing fields.
2409 We cleared the whole structure, above,
2410 if any fields are missing. */
2414 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
2415 unsignedp
= TREE_UNSIGNED (field
);
2416 mode
= DECL_MODE (field
);
2417 if (DECL_BIT_FIELD (field
))
2420 if (TREE_CODE (DECL_FIELD_BITPOS (field
)) != INTEGER_CST
)
2421 /* ??? This case remains to be written. */
2424 bitpos
= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field
));
2426 store_field (target
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
),
2427 /* The alignment of TARGET is
2428 at least what its type requires. */
2430 TYPE_ALIGN (type
) / BITS_PER_UNIT
,
2431 int_size_in_bytes (type
));
2434 else if (TREE_CODE (type
) == ARRAY_TYPE
)
2438 tree domain
= TYPE_DOMAIN (type
);
2439 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
2440 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
2441 tree elttype
= TREE_TYPE (type
);
2443 /* If the constructor has fewer fields than the structure,
2444 clear the whole structure first. Similarly if this is a
2445 static constructor of a non-BLKmode object. */
2447 if (list_length (CONSTRUCTOR_ELTS (exp
)) < maxelt
- minelt
+ 1
2448 || (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
2449 clear_storage (target
, maxelt
- minelt
+ 1);
2451 /* Inform later passes that the old value is dead. */
2452 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
2454 /* Store each element of the constructor into
2455 the corresponding element of TARGET, determined
2456 by counting the elements. */
2457 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
2459 elt
= TREE_CHAIN (elt
), i
++)
2461 register enum machine_mode mode
;
2466 mode
= TYPE_MODE (elttype
);
2467 bitsize
= GET_MODE_BITSIZE (mode
);
2468 unsignedp
= TREE_UNSIGNED (elttype
);
/* Element I lives at bit offset I * element-size.  */
2470 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
2472 store_field (target
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
),
2473 /* The alignment of TARGET is
2474 at least what its type requires. */
2476 TYPE_ALIGN (type
) / BITS_PER_UNIT
,
2477 int_size_in_bytes (type
));
2485 /* Store the value of EXP (an expression tree)
2486 into a subfield of TARGET which has mode MODE and occupies
2487 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2488 If MODE is VOIDmode, it means that we are storing into a bit-field.
2490 If VALUE_MODE is VOIDmode, return nothing in particular.
2491 UNSIGNEDP is not used in this case.
2493 Otherwise, return an rtx for the value stored. This rtx
2494 has mode VALUE_MODE if that is convenient to do.
2495 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2497 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2498 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2501 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
2502 unsignedp
, align
, total_size
)
2504 int bitsize
, bitpos
;
2505 enum machine_mode mode
;
2507 enum machine_mode value_mode
;
2512 HOST_WIDE_INT width_mask
= 0;
2514 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
2515 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
2517 /* If we are storing into an unaligned field of an aligned union that is
2518 in a register, we may have the mode of TARGET being an integer mode but
2519 MODE == BLKmode. In that case, get an aligned object whose size and
2520 alignment are the same as TARGET and store TARGET into it (we can avoid
2521 the store if the field being stored is the entire width of TARGET). Then
2522 call ourselves recursively to store the field into a BLKmode version of
2523 that object. Finally, load from the object into TARGET. This is not
2524 very efficient in general, but should only be slightly more expensive
2525 than the otherwise-required unaligned accesses. Perhaps this can be
2526 cleaned up later. */
2529 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
2531 rtx object
= assign_stack_temp (GET_MODE (target
),
2532 GET_MODE_SIZE (GET_MODE (target
)), 0);
2533 rtx blk_object
= copy_rtx (object
);
2535 PUT_MODE (blk_object
, BLKmode
);
2537 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
2538 emit_move_insn (object
, target
);
2540 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
2543 emit_move_insn (target
, object
);
2548 /* If the structure is in a register or if the component
2549 is a bit field, we cannot use addressing to access it.
2550 Use bit-field techniques or SUBREG to store in it. */
2552 if (mode
== VOIDmode
2553 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
2554 || GET_CODE (target
) == REG
2555 || GET_CODE (target
) == SUBREG
)
2557 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
2558 /* Store the value in the bitfield. */
2559 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
2560 if (value_mode
!= VOIDmode
)
2562 /* The caller wants an rtx for the value. */
2563 /* If possible, avoid refetching from the bitfield itself. */
2565 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
2566 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
2567 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
2568 NULL_RTX
, value_mode
, 0, align
,
2575 rtx addr
= XEXP (target
, 0);
2578 /* If a value is wanted, it must be the lhs;
2579 so make the address stable for multiple use. */
2581 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
2582 && ! CONSTANT_ADDRESS_P (addr
)
2583 /* A frame-pointer reference is already stable. */
2584 && ! (GET_CODE (addr
) == PLUS
2585 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
2586 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
2587 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
2588 addr
= copy_to_reg (addr
);
2590 /* Now build a reference to just the desired component. */
2592 to_rtx
= change_address (target
, mode
,
2593 plus_constant (addr
, (bitpos
/ BITS_PER_UNIT
)));
2594 MEM_IN_STRUCT_P (to_rtx
) = 1;
2596 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
2600 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2601 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2602 ARRAY_REFs at constant positions and find the ultimate containing object,
2605 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2606 bit position, and *PUNSIGNEDP to the signedness of the field.
2607 If the position of the field is variable, we store a tree
2608 giving the variable offset (in units) in *POFFSET.
2609 This offset is in addition to the bit position.
2610 If the position is not variable, we store 0 in *POFFSET.
2612 If any of the extraction expressions is volatile,
2613 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2615 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2616 is a mode that can be used to access the field. In that case, *PBITSIZE
2619 If the field describes a variable-sized object, *PMODE is set to
2620 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2621 this case, but the address of the object can be found. */
2624 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
, punsignedp
, pvolatilep
)
2629 enum machine_mode
*pmode
;
2634 enum machine_mode mode
= VOIDmode
;
2637 if (TREE_CODE (exp
) == COMPONENT_REF
)
2639 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
2640 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
2641 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
2642 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
2644 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
2646 size_tree
= TREE_OPERAND (exp
, 1);
2647 *punsignedp
= TREE_UNSIGNED (exp
);
2651 mode
= TYPE_MODE (TREE_TYPE (exp
));
2652 *pbitsize
= GET_MODE_BITSIZE (mode
);
2653 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
2658 if (TREE_CODE (size_tree
) != INTEGER_CST
)
2659 mode
= BLKmode
, *pbitsize
= -1;
2661 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
2664 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2665 and find the ultimate containing object. */
2671 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
2673 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
2674 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
2675 : TREE_OPERAND (exp
, 2));
2677 if (TREE_CODE (pos
) == PLUS_EXPR
)
2680 if (TREE_CODE (TREE_OPERAND (pos
, 0)) == INTEGER_CST
)
2682 constant
= TREE_OPERAND (pos
, 0);
2683 var
= TREE_OPERAND (pos
, 1);
2685 else if (TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
2687 constant
= TREE_OPERAND (pos
, 1);
2688 var
= TREE_OPERAND (pos
, 0);
2692 *pbitpos
+= TREE_INT_CST_LOW (constant
);
2694 offset
= size_binop (PLUS_EXPR
, offset
,
2695 size_binop (FLOOR_DIV_EXPR
, var
,
2696 size_int (BITS_PER_UNIT
)));
2698 offset
= size_binop (FLOOR_DIV_EXPR
, var
,
2699 size_int (BITS_PER_UNIT
));
2701 else if (TREE_CODE (pos
) == INTEGER_CST
)
2702 *pbitpos
+= TREE_INT_CST_LOW (pos
);
2705 /* Assume here that the offset is a multiple of a unit.
2706 If not, there should be an explicitly added constant. */
2708 offset
= size_binop (PLUS_EXPR
, offset
,
2709 size_binop (FLOOR_DIV_EXPR
, pos
,
2710 size_int (BITS_PER_UNIT
)));
2712 offset
= size_binop (FLOOR_DIV_EXPR
, pos
,
2713 size_int (BITS_PER_UNIT
));
2717 else if (TREE_CODE (exp
) == ARRAY_REF
2718 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
2719 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
)
2721 *pbitpos
+= (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
2722 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp
))));
2724 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
2725 && ! ((TREE_CODE (exp
) == NOP_EXPR
2726 || TREE_CODE (exp
) == CONVERT_EXPR
)
2727 && (TYPE_MODE (TREE_TYPE (exp
))
2728 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
2731 /* If any reference in the chain is volatile, the effect is volatile. */
2732 if (TREE_THIS_VOLATILE (exp
))
2734 exp
= TREE_OPERAND (exp
, 0);
2737 /* If this was a bit-field, see if there is a mode that allows direct
2738 access in case EXP is in memory. */
2739 if (mode
== VOIDmode
&& *pbitpos
% *pbitsize
== 0)
2741 mode
= mode_for_size (*pbitsize
, MODE_INT
, 0);
2742 if (mode
== BLKmode
)
2749 /* We aren't finished fixing the callers to really handle nonzero offset. */
2757 /* Given an rtx VALUE that may contain additions and multiplications,
2758 return an equivalent value that just refers to a register or memory.
2759 This is done by generating instructions to perform the arithmetic
2760 and returning a pseudo-register containing the value. */
2763 force_operand (value
, target
)
2766 register optab binoptab
= 0;
2767 /* Use a temporary to force order of execution of calls to
2771 /* Use subtarget as the target for operand 0 of a binary operation. */
2772 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
2774 if (GET_CODE (value
) == PLUS
)
2775 binoptab
= add_optab
;
2776 else if (GET_CODE (value
) == MINUS
)
2777 binoptab
= sub_optab
;
2778 else if (GET_CODE (value
) == MULT
)
2780 op2
= XEXP (value
, 1);
2781 if (!CONSTANT_P (op2
)
2782 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
2784 tmp
= force_operand (XEXP (value
, 0), subtarget
);
2785 return expand_mult (GET_MODE (value
), tmp
,
2786 force_operand (op2
, NULL_RTX
),
2792 op2
= XEXP (value
, 1);
2793 if (!CONSTANT_P (op2
)
2794 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
2796 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
2798 binoptab
= add_optab
;
2799 op2
= negate_rtx (GET_MODE (value
), op2
);
2802 /* Check for an addition with OP2 a constant integer and our first
2803 operand a PLUS of a virtual register and something else. In that
2804 case, we want to emit the sum of the virtual register and the
2805 constant first and then add the other value. This allows virtual
2806 register instantiation to simply modify the constant rather than
2807 creating another one around this addition. */
2808 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
2809 && GET_CODE (XEXP (value
, 0)) == PLUS
2810 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
2811 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2812 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
2814 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
2815 XEXP (XEXP (value
, 0), 0), op2
,
2816 subtarget
, 0, OPTAB_LIB_WIDEN
);
2817 return expand_binop (GET_MODE (value
), binoptab
, temp
,
2818 force_operand (XEXP (XEXP (value
, 0), 1), 0),
2819 target
, 0, OPTAB_LIB_WIDEN
);
2822 tmp
= force_operand (XEXP (value
, 0), subtarget
);
2823 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
2824 force_operand (op2
, NULL_RTX
),
2825 target
, 0, OPTAB_LIB_WIDEN
);
2826 /* We give UNSIGNEP = 0 to expand_binop
2827 because the only operations we are expanding here are signed ones. */
2832 /* Subroutine of expand_expr:
2833 save the non-copied parts (LIST) of an expr (LHS), and return a list
2834 which can restore these values to their previous values,
2835 should something modify their storage. */
2838 save_noncopied_parts (lhs
, list
)
2845 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
2846 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
2847 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
2850 tree part
= TREE_VALUE (tail
);
2851 tree part_type
= TREE_TYPE (part
);
2852 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
2853 rtx target
= assign_stack_temp (TYPE_MODE (part_type
),
2854 int_size_in_bytes (part_type
), 0);
2855 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
2856 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
2857 parts
= tree_cons (to_be_saved
,
2858 build (RTL_EXPR
, part_type
, NULL_TREE
,
2861 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
2866 /* Subroutine of expand_expr:
2867 record the non-copied parts (LIST) of an expr (LHS), and return a list
2868 which specifies the initial values of these parts. */
2871 init_noncopied_parts (lhs
, list
)
2878 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
2879 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
2880 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
2883 tree part
= TREE_VALUE (tail
);
2884 tree part_type
= TREE_TYPE (part
);
2885 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
2886 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
2891 /* Subroutine of expand_expr: return nonzero iff there is no way that
2892 EXP can reference X, which is being modified. */
2895 safe_from_p (x
, exp
)
2905 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2906 find the underlying pseudo. */
2907 if (GET_CODE (x
) == SUBREG
)
2910 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
2914 /* If X is a location in the outgoing argument area, it is always safe. */
2915 if (GET_CODE (x
) == MEM
2916 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
2917 || (GET_CODE (XEXP (x
, 0)) == PLUS
2918 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
2921 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
2924 exp_rtl
= DECL_RTL (exp
);
2931 if (TREE_CODE (exp
) == TREE_LIST
)
2932 return ((TREE_VALUE (exp
) == 0
2933 || safe_from_p (x
, TREE_VALUE (exp
)))
2934 && (TREE_CHAIN (exp
) == 0
2935 || safe_from_p (x
, TREE_CHAIN (exp
))));
2940 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
2944 return (safe_from_p (x
, TREE_OPERAND (exp
, 0))
2945 && safe_from_p (x
, TREE_OPERAND (exp
, 1)));
2949 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2950 the expression. If it is set, we conflict iff we are that rtx or
2951 both are in memory. Otherwise, we check all operands of the
2952 expression recursively. */
2954 switch (TREE_CODE (exp
))
2957 return staticp (TREE_OPERAND (exp
, 0));
2960 if (GET_CODE (x
) == MEM
)
2965 exp_rtl
= CALL_EXPR_RTL (exp
);
2968 /* Assume that the call will clobber all hard registers and
2970 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
2971 || GET_CODE (x
) == MEM
)
2978 exp_rtl
= RTL_EXPR_RTL (exp
);
2980 /* We don't know what this can modify. */
2985 case WITH_CLEANUP_EXPR
:
2986 exp_rtl
= RTL_EXPR_RTL (exp
);
2990 exp_rtl
= SAVE_EXPR_RTL (exp
);
2994 /* The only operand we look at is operand 1. The rest aren't
2995 part of the expression. */
2996 return safe_from_p (x
, TREE_OPERAND (exp
, 1));
2998 case METHOD_CALL_EXPR
:
2999 /* This takes a rtx argument, but shouldn't appear here. */
3003 /* If we have an rtx, we do not need to scan our operands. */
3007 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
3008 for (i
= 0; i
< nops
; i
++)
3009 if (TREE_OPERAND (exp
, i
) != 0
3010 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
)))
3014 /* If we have an rtl, find any enclosed object. Then see if we conflict
3018 if (GET_CODE (exp_rtl
) == SUBREG
)
3020 exp_rtl
= SUBREG_REG (exp_rtl
);
3021 if (GET_CODE (exp_rtl
) == REG
3022 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
3026 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3027 are memory and EXP is not readonly. */
3028 return ! (rtx_equal_p (x
, exp_rtl
)
3029 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
3030 && ! TREE_READONLY (exp
)));
3033 /* If we reach here, it is safe. */
3037 /* Subroutine of expand_expr: return nonzero iff EXP is an
3038 expression whose type is statically determinable. */
3044 if (TREE_CODE (exp
) == PARM_DECL
3045 || TREE_CODE (exp
) == VAR_DECL
3046 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
3047 || TREE_CODE (exp
) == COMPONENT_REF
3048 || TREE_CODE (exp
) == ARRAY_REF
)
3053 /* expand_expr: generate code for computing expression EXP.
3054 An rtx for the computed value is returned. The value is never null.
3055 In the case of a void EXP, const0_rtx is returned.
3057 The value may be stored in TARGET if TARGET is nonzero.
3058 TARGET is just a suggestion; callers must assume that
3059 the rtx returned may not be the same as TARGET.
3061 If TARGET is CONST0_RTX, it means that the value will be ignored.
3063 If TMODE is not VOIDmode, it suggests generating the
3064 result in mode TMODE. But this is done only when convenient.
3065 Otherwise, TMODE is ignored and the value generated in its natural mode.
3066 TMODE is just a suggestion; callers must assume that
3067 the rtx returned may not have mode TMODE.
3069 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3070 with a constant address even if that address is not normally legitimate.
3071 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3073 If MODIFIER is EXPAND_SUM then when EXP is an addition
3074 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3075 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3076 products as above, or REG or MEM, or constant.
3077 Ordinarily in such cases we would output mul or add instructions
3078 and then return a pseudo reg containing the sum.
3080 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3081 it also marks a label as absolutely required (it can't be dead).
3082 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3083 This is used for outputting expressions used in initializers. */
3086 expand_expr (exp
, target
, tmode
, modifier
)
3089 enum machine_mode tmode
;
3090 enum expand_modifier modifier
;
3092 register rtx op0
, op1
, temp
;
3093 tree type
= TREE_TYPE (exp
);
3094 int unsignedp
= TREE_UNSIGNED (type
);
3095 register enum machine_mode mode
= TYPE_MODE (type
);
3096 register enum tree_code code
= TREE_CODE (exp
);
3098 /* Use subtarget as the target for operand 0 of a binary operation. */
3099 rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
3100 rtx original_target
= target
;
3101 int ignore
= target
== const0_rtx
;
3104 /* Don't use hard regs as subtargets, because the combiner
3105 can only handle pseudo regs. */
3106 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
3108 /* Avoid subtargets inside loops,
3109 since they hide some invariant expressions. */
3110 if (preserve_subexpressions_p ())
3113 if (ignore
) target
= 0, original_target
= 0;
3115 /* If will do cse, generate all results into pseudo registers
3116 since 1) that allows cse to find more things
3117 and 2) otherwise cse could produce an insn the machine
3120 if (! cse_not_expected
&& mode
!= BLKmode
&& target
3121 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3124 /* Ensure we reference a volatile object even if value is ignored. */
3125 if (ignore
&& TREE_THIS_VOLATILE (exp
)
3126 && mode
!= VOIDmode
&& mode
!= BLKmode
)
3128 target
= gen_reg_rtx (mode
);
3129 temp
= expand_expr (exp
, target
, VOIDmode
, modifier
);
3131 emit_move_insn (target
, temp
);
3139 tree function
= decl_function_context (exp
);
3140 /* Handle using a label in a containing function. */
3141 if (function
!= current_function_decl
&& function
!= 0)
3143 struct function
*p
= find_function_data (function
);
3144 /* Allocate in the memory associated with the function
3145 that the label is in. */
3146 push_obstacks (p
->function_obstack
,
3147 p
->function_maybepermanent_obstack
);
3149 p
->forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
3150 label_rtx (exp
), p
->forced_labels
);
3153 else if (modifier
== EXPAND_INITIALIZER
)
3154 forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
3155 label_rtx (exp
), forced_labels
);
3156 temp
= gen_rtx (MEM
, FUNCTION_MODE
,
3157 gen_rtx (LABEL_REF
, Pmode
, label_rtx (exp
)));
3158 if (function
!= current_function_decl
&& function
!= 0)
3159 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
3164 if (DECL_RTL (exp
) == 0)
3166 error_with_decl (exp
, "prior parameter's size depends on `%s'");
3167 return CONST0_RTX (mode
);
3173 if (DECL_RTL (exp
) == 0)
3175 /* Ensure variable marked as used
3176 even if it doesn't go through a parser. */
3177 TREE_USED (exp
) = 1;
3178 /* Handle variables inherited from containing functions. */
3179 context
= decl_function_context (exp
);
3181 /* We treat inline_function_decl as an alias for the current function
3182 because that is the inline function whose vars, types, etc.
3183 are being merged into the current function.
3184 See expand_inline_function. */
3185 if (context
!= 0 && context
!= current_function_decl
3186 && context
!= inline_function_decl
3187 /* If var is static, we don't need a static chain to access it. */
3188 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
3189 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
3193 /* Mark as non-local and addressable. */
3194 DECL_NONLOCAL (exp
) = 1;
3195 mark_addressable (exp
);
3196 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
3198 addr
= XEXP (DECL_RTL (exp
), 0);
3199 if (GET_CODE (addr
) == MEM
)
3200 addr
= gen_rtx (MEM
, Pmode
, fix_lexical_addr (XEXP (addr
, 0), exp
));
3202 addr
= fix_lexical_addr (addr
, exp
);
3203 return change_address (DECL_RTL (exp
), mode
, addr
);
3206 /* This is the case of an array whose size is to be determined
3207 from its initializer, while the initializer is still being parsed.
3209 if (GET_CODE (DECL_RTL (exp
)) == MEM
3210 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
3211 return change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
3212 XEXP (DECL_RTL (exp
), 0));
3213 if (GET_CODE (DECL_RTL (exp
)) == MEM
3214 && modifier
!= EXPAND_CONST_ADDRESS
3215 && modifier
!= EXPAND_SUM
3216 && modifier
!= EXPAND_INITIALIZER
)
3218 /* DECL_RTL probably contains a constant address.
3219 On RISC machines where a constant address isn't valid,
3220 make some insns to get that address into a register. */
3221 if (!memory_address_p (DECL_MODE (exp
), XEXP (DECL_RTL (exp
), 0))
3223 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp
), 0))))
3224 return change_address (DECL_RTL (exp
), VOIDmode
,
3225 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
3227 return DECL_RTL (exp
);
3230 return immed_double_const (TREE_INT_CST_LOW (exp
),
3231 TREE_INT_CST_HIGH (exp
),
3235 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
3238 /* If optimized, generate immediate CONST_DOUBLE
3239 which will be turned into memory by reload if necessary.
3241 We used to force a register so that loop.c could see it. But
3242 this does not allow gen_* patterns to perform optimizations with
3243 the constants. It also produces two insns in cases like "x = 1.0;".
3244 On most machines, floating-point constants are not permitted in
3245 many insns, so we'd end up copying it to a register in any case.
3247 Now, we do the copying in expand_binop, if appropriate. */
3248 return immed_real_const (exp
);
3252 if (! TREE_CST_RTL (exp
))
3253 output_constant_def (exp
);
3255 /* TREE_CST_RTL probably contains a constant address.
3256 On RISC machines where a constant address isn't valid,
3257 make some insns to get that address into a register. */
3258 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
3259 && modifier
!= EXPAND_CONST_ADDRESS
3260 && modifier
!= EXPAND_INITIALIZER
3261 && modifier
!= EXPAND_SUM
3262 && !memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0)))
3263 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
3264 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
3265 return TREE_CST_RTL (exp
);
3268 context
= decl_function_context (exp
);
3269 /* We treat inline_function_decl as an alias for the current function
3270 because that is the inline function whose vars, types, etc.
3271 are being merged into the current function.
3272 See expand_inline_function. */
3273 if (context
== current_function_decl
|| context
== inline_function_decl
)
3276 /* If this is non-local, handle it. */
3279 temp
= SAVE_EXPR_RTL (exp
);
3280 if (temp
&& GET_CODE (temp
) == REG
)
3282 put_var_into_stack (exp
);
3283 temp
= SAVE_EXPR_RTL (exp
);
3285 if (temp
== 0 || GET_CODE (temp
) != MEM
)
3287 return change_address (temp
, mode
,
3288 fix_lexical_addr (XEXP (temp
, 0), exp
));
3290 if (SAVE_EXPR_RTL (exp
) == 0)
3292 if (mode
== BLKmode
)
3294 = assign_stack_temp (mode
,
3295 int_size_in_bytes (TREE_TYPE (exp
)), 0);
3297 temp
= gen_reg_rtx (mode
);
3298 SAVE_EXPR_RTL (exp
) = temp
;
3299 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
3300 if (!optimize
&& GET_CODE (temp
) == REG
)
3301 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, temp
,
3304 return SAVE_EXPR_RTL (exp
);
3307 /* Exit the current loop if the body-expression is true. */
3309 rtx label
= gen_label_rtx ();
3310 do_jump (TREE_OPERAND (exp
, 0), label
, NULL_RTX
);
3311 expand_exit_loop (NULL_PTR
);
3317 expand_start_loop (1);
3318 expand_expr_stmt (TREE_OPERAND (exp
, 0));
3325 tree vars
= TREE_OPERAND (exp
, 0);
3326 int vars_need_expansion
= 0;
3328 /* Need to open a binding contour here because
3329 if there are any cleanups they most be contained here. */
3330 expand_start_bindings (0);
3332 /* Mark the corresponding BLOCK for output. */
3333 if (TREE_OPERAND (exp
, 2) != 0)
3334 TREE_USED (TREE_OPERAND (exp
, 2)) = 1;
3336 /* If VARS have not yet been expanded, expand them now. */
3339 if (DECL_RTL (vars
) == 0)
3341 vars_need_expansion
= 1;
3344 expand_decl_init (vars
);
3345 vars
= TREE_CHAIN (vars
);
3348 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
3350 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
3356 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
3358 emit_insns (RTL_EXPR_SEQUENCE (exp
));
3359 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
3360 return RTL_EXPR_RTL (exp
);
3363 /* All elts simple constants => refer to a constant in memory. But
3364 if this is a non-BLKmode mode, let it store a field at a time
3365 since that should make a CONST_INT or CONST_DOUBLE when we
3367 if (TREE_STATIC (exp
) && (mode
== BLKmode
|| TREE_ADDRESSABLE (exp
)))
3369 rtx constructor
= output_constant_def (exp
);
3370 if (modifier
!= EXPAND_CONST_ADDRESS
3371 && modifier
!= EXPAND_INITIALIZER
3372 && modifier
!= EXPAND_SUM
3373 && !memory_address_p (GET_MODE (constructor
),
3374 XEXP (constructor
, 0)))
3375 constructor
= change_address (constructor
, VOIDmode
,
3376 XEXP (constructor
, 0));
3383 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3384 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
3389 if (target
== 0 || ! safe_from_p (target
, exp
))
3391 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
3392 target
= gen_reg_rtx (mode
);
3395 rtx safe_target
= assign_stack_temp (mode
, int_size_in_bytes (type
), 0);
3397 MEM_IN_STRUCT_P (safe_target
) = MEM_IN_STRUCT_P (target
);
3398 target
= safe_target
;
3401 store_constructor (exp
, target
);
3407 tree exp1
= TREE_OPERAND (exp
, 0);
3410 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3411 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3412 This code has the same general effect as simply doing
3413 expand_expr on the save expr, except that the expression PTR
3414 is computed for use as a memory address. This means different
3415 code, suitable for indexing, may be generated. */
3416 if (TREE_CODE (exp1
) == SAVE_EXPR
3417 && SAVE_EXPR_RTL (exp1
) == 0
3418 && TREE_CODE (exp2
= TREE_OPERAND (exp1
, 0)) != ERROR_MARK
3419 && TYPE_MODE (TREE_TYPE (exp1
)) == Pmode
3420 && TYPE_MODE (TREE_TYPE (exp2
)) == Pmode
)
3422 temp
= expand_expr (TREE_OPERAND (exp1
, 0), NULL_RTX
,
3423 VOIDmode
, EXPAND_SUM
);
3424 op0
= memory_address (mode
, temp
);
3425 op0
= copy_all_regs (op0
);
3426 SAVE_EXPR_RTL (exp1
) = op0
;
3430 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3431 op0
= memory_address (mode
, op0
);
3434 temp
= gen_rtx (MEM
, mode
, op0
);
3435 /* If address was computed by addition,
3436 mark this as an element of an aggregate. */
3437 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
3438 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
3439 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == PLUS_EXPR
)
3440 || TREE_CODE (TREE_TYPE (exp
)) == ARRAY_TYPE
3441 || TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
3442 || TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
3443 || (TREE_CODE (exp1
) == ADDR_EXPR
3444 && (exp2
= TREE_OPERAND (exp1
, 0))
3445 && (TREE_CODE (TREE_TYPE (exp2
)) == ARRAY_TYPE
3446 || TREE_CODE (TREE_TYPE (exp2
)) == RECORD_TYPE
3447 || TREE_CODE (TREE_TYPE (exp2
)) == UNION_TYPE
)))
3448 MEM_IN_STRUCT_P (temp
) = 1;
3449 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) || flag_volatile
;
3450 #if 0 /* It is incorrectto set RTX_UNCHANGING_P here, because the fact that
3451 a location is accessed through a pointer to const does not mean
3452 that the value there can never change. */
3453 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
);
3459 if (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
3460 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3462 /* Nonconstant array index or nonconstant element size.
3463 Generate the tree for *(&array+index) and expand that,
3464 except do it in a language-independent way
3465 and don't complain about non-lvalue arrays.
3466 `mark_addressable' should already have been called
3467 for any array for which this case will be reached. */
3469 /* Don't forget the const or volatile flag from the array element. */
3470 tree variant_type
= build_type_variant (type
,
3471 TREE_READONLY (exp
),
3472 TREE_THIS_VOLATILE (exp
));
3473 tree array_adr
= build1 (ADDR_EXPR
, build_pointer_type (variant_type
),
3474 TREE_OPERAND (exp
, 0));
3475 tree index
= TREE_OPERAND (exp
, 1);
3478 /* Convert the integer argument to a type the same size as a pointer
3479 so the multiply won't overflow spuriously. */
3480 if (TYPE_PRECISION (TREE_TYPE (index
)) != POINTER_SIZE
)
3481 index
= convert (type_for_size (POINTER_SIZE
, 0), index
);
3483 /* Don't think the address has side effects
3484 just because the array does.
3485 (In some cases the address might have side effects,
3486 and we fail to record that fact here. However, it should not
3487 matter, since expand_expr should not care.) */
3488 TREE_SIDE_EFFECTS (array_adr
) = 0;
3490 elt
= build1 (INDIRECT_REF
, type
,
3491 fold (build (PLUS_EXPR
, TYPE_POINTER_TO (variant_type
),
3493 fold (build (MULT_EXPR
,
3494 TYPE_POINTER_TO (variant_type
),
3495 index
, size_in_bytes (type
))))));
3497 /* Volatility, etc., of new expression is same as old expression. */
3498 TREE_SIDE_EFFECTS (elt
) = TREE_SIDE_EFFECTS (exp
);
3499 TREE_THIS_VOLATILE (elt
) = TREE_THIS_VOLATILE (exp
);
3500 TREE_READONLY (elt
) = TREE_READONLY (exp
);
3502 return expand_expr (elt
, target
, tmode
, modifier
);
3505 /* Fold an expression like: "foo"[2].
3506 This is not done in fold so it won't happen inside &. */
3509 tree arg0
= TREE_OPERAND (exp
, 0);
3510 tree arg1
= TREE_OPERAND (exp
, 1);
3512 if (TREE_CODE (arg0
) == STRING_CST
3513 && TREE_CODE (arg1
) == INTEGER_CST
3514 && !TREE_INT_CST_HIGH (arg1
)
3515 && (i
= TREE_INT_CST_LOW (arg1
)) < TREE_STRING_LENGTH (arg0
))
3517 if (TREE_TYPE (TREE_TYPE (arg0
)) == integer_type_node
)
3519 exp
= build_int_2 (((int *)TREE_STRING_POINTER (arg0
))[i
], 0);
3520 TREE_TYPE (exp
) = integer_type_node
;
3521 return expand_expr (exp
, target
, tmode
, modifier
);
3523 if (TREE_TYPE (TREE_TYPE (arg0
)) == char_type_node
)
3525 exp
= build_int_2 (TREE_STRING_POINTER (arg0
)[i
], 0);
3526 TREE_TYPE (exp
) = integer_type_node
;
3527 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0
)), exp
), target
, tmode
, modifier
);
3532 /* If this is a constant index into a constant array,
3533 just get the value from the array. Handle both the cases when
3534 we have an explicit constructor and when our operand is a variable
3535 that was declared const. */
3537 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
3538 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
3540 tree index
= fold (TREE_OPERAND (exp
, 1));
3541 if (TREE_CODE (index
) == INTEGER_CST
3542 && TREE_INT_CST_HIGH (index
) == 0)
3544 int i
= TREE_INT_CST_LOW (index
);
3545 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
3548 elem
= TREE_CHAIN (elem
);
3550 return expand_expr (fold (TREE_VALUE (elem
)), target
,
3555 else if (TREE_READONLY (TREE_OPERAND (exp
, 0))
3556 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
3557 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == ARRAY_TYPE
3558 && TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
3559 && DECL_INITIAL (TREE_OPERAND (exp
, 0))
3561 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp
, 0)))
3564 tree index
= fold (TREE_OPERAND (exp
, 1));
3565 if (TREE_CODE (index
) == INTEGER_CST
3566 && TREE_INT_CST_HIGH (index
) == 0)
3568 int i
= TREE_INT_CST_LOW (index
);
3569 tree init
= DECL_INITIAL (TREE_OPERAND (exp
, 0));
3571 if (TREE_CODE (init
) == CONSTRUCTOR
)
3573 tree elem
= CONSTRUCTOR_ELTS (init
);
3576 elem
= TREE_CHAIN (elem
);
3578 return expand_expr (fold (TREE_VALUE (elem
)), target
,
3581 else if (TREE_CODE (init
) == STRING_CST
3582 && i
< TREE_STRING_LENGTH (init
))
3584 temp
= GEN_INT (TREE_STRING_POINTER (init
)[i
]);
3585 return convert_to_mode (mode
, temp
, 0);
3589 /* Treat array-ref with constant index as a component-ref. */
3593 /* If the operand is a CONSTRUCTOR, we can just extract the
3594 appropriate field if it is present. */
3595 if (code
!= ARRAY_REF
3596 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
3600 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
3601 elt
= TREE_CHAIN (elt
))
3602 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
3603 return expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
3607 enum machine_mode mode1
;
3612 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
3613 &mode1
, &unsignedp
, &volatilep
);
3615 /* In some cases, we will be offsetting OP0's address by a constant.
3616 So get it as a sum, if possible. If we will be using it
3617 directly in an insn, we validate it. */
3618 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3620 /* If this is a constant, put it into a register if it is a
3621 legitimate constant and memory if it isn't. */
3622 if (CONSTANT_P (op0
))
3624 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
3625 if (LEGITIMATE_CONSTANT_P (op0
))
3626 op0
= force_reg (mode
, op0
);
3628 op0
= validize_mem (force_const_mem (mode
, op0
));
3633 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3635 if (GET_CODE (op0
) != MEM
)
3637 op0
= change_address (op0
, VOIDmode
,
3638 gen_rtx (PLUS
, Pmode
, XEXP (op0
, 0),
3639 force_reg (Pmode
, offset_rtx
)));
3642 /* Don't forget about volatility even if this is a bitfield. */
3643 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
3645 op0
= copy_rtx (op0
);
3646 MEM_VOLATILE_P (op0
) = 1;
3649 if (mode1
== VOIDmode
3650 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
])
3651 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
)
3653 /* In cases where an aligned union has an unaligned object
3654 as a field, we might be extracting a BLKmode value from
3655 an integer-mode (e.g., SImode) object. Handle this case
3656 by doing the extract into an object as wide as the field
3657 (which we know to be the width of a basic mode), then
3658 storing into memory, and changing the mode to BLKmode. */
3659 enum machine_mode ext_mode
= mode
;
3661 if (ext_mode
== BLKmode
)
3662 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
3664 if (ext_mode
== BLKmode
)
3667 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
3668 unsignedp
, target
, ext_mode
, ext_mode
,
3669 TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
,
3670 int_size_in_bytes (TREE_TYPE (tem
)));
3671 if (mode
== BLKmode
)
3673 rtx
new = assign_stack_temp (ext_mode
,
3674 bitsize
/ BITS_PER_UNIT
, 0);
3676 emit_move_insn (new, op0
);
3677 op0
= copy_rtx (new);
3678 PUT_MODE (op0
, BLKmode
);
3684 /* Get a reference to just this component. */
3685 if (modifier
== EXPAND_CONST_ADDRESS
3686 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
3687 op0
= gen_rtx (MEM
, mode1
, plus_constant (XEXP (op0
, 0),
3688 (bitpos
/ BITS_PER_UNIT
)));
3690 op0
= change_address (op0
, mode1
,
3691 plus_constant (XEXP (op0
, 0),
3692 (bitpos
/ BITS_PER_UNIT
)));
3693 MEM_IN_STRUCT_P (op0
) = 1;
3694 MEM_VOLATILE_P (op0
) |= volatilep
;
3695 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
)
3698 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
3699 convert_move (target
, op0
, unsignedp
);
3705 tree base
= build_unary_op (ADDR_EXPR
, TREE_OPERAND (exp
, 0), 0);
3706 tree addr
= build (PLUS_EXPR
, type
, base
, TREE_OPERAND (exp
, 1));
3707 op0
= expand_expr (addr
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
3708 temp
= gen_rtx (MEM
, mode
, memory_address (mode
, op0
));
3709 MEM_IN_STRUCT_P (temp
) = 1;
3710 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) || flag_volatile
;
3711 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3712 a location is accessed through a pointer to const does not mean
3713 that the value there can never change. */
3714 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
);
3719 /* Intended for a reference to a buffer of a file-object in Pascal.
3720 But it's not certain that a special tree code will really be
3721 necessary for these. INDIRECT_REF might work for them. */
3725 case WITH_CLEANUP_EXPR
:
3726 if (RTL_EXPR_RTL (exp
) == 0)
3729 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
3731 = tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2), cleanups_this_call
);
3732 /* That's it for this cleanup. */
3733 TREE_OPERAND (exp
, 2) = 0;
3735 return RTL_EXPR_RTL (exp
);
3738 /* Check for a built-in function. */
3739 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
3740 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == FUNCTION_DECL
3741 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
3742 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
3743 /* If this call was expanded already by preexpand_calls,
3744 just return the result we got. */
3745 if (CALL_EXPR_RTL (exp
) != 0)
3746 return CALL_EXPR_RTL (exp
);
3747 return expand_call (exp
, target
, ignore
);
3749 case NON_LVALUE_EXPR
:
3752 case REFERENCE_EXPR
:
3753 if (TREE_CODE (type
) == VOID_TYPE
|| ignore
)
3755 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
3758 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3759 return expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, modifier
);
3760 if (TREE_CODE (type
) == UNION_TYPE
)
3762 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
3765 if (mode
== BLKmode
)
3767 if (TYPE_SIZE (type
) == 0
3768 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3770 target
= assign_stack_temp (BLKmode
,
3771 (TREE_INT_CST_LOW (TYPE_SIZE (type
))
3772 + BITS_PER_UNIT
- 1)
3773 / BITS_PER_UNIT
, 0);
3776 target
= gen_reg_rtx (mode
);
3778 if (GET_CODE (target
) == MEM
)
3779 /* Store data into beginning of memory target. */
3780 store_expr (TREE_OPERAND (exp
, 0),
3781 change_address (target
, TYPE_MODE (valtype
), 0),
3783 else if (GET_CODE (target
) == REG
)
3784 /* Store this field into a union of the proper type. */
3785 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
3786 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
3788 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))));
3792 /* Return the entire union. */
3795 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, mode
, modifier
);
3796 if (GET_MODE (op0
) == mode
|| GET_MODE (op0
) == VOIDmode
)
3798 if (modifier
== EXPAND_INITIALIZER
)
3799 return gen_rtx (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
3800 if (flag_force_mem
&& GET_CODE (op0
) == MEM
)
3801 op0
= copy_to_reg (op0
);
3804 return convert_to_mode (mode
, op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
3806 convert_move (target
, op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
3810 /* We come here from MINUS_EXPR when the second operand is a constant. */
3812 this_optab
= add_optab
;
3814 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3815 something else, make sure we add the register to the constant and
3816 then to the other thing. This case can occur during strength
3817 reduction and doing it this way will produce better code if the
3818 frame pointer or argument pointer is eliminated.
3820 fold-const.c will ensure that the constant is always in the inner
3821 PLUS_EXPR, so the only case we need to do anything about is if
3822 sp, ap, or fp is our second argument, in which case we must swap
3823 the innermost first argument and our second argument. */
3825 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
3826 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
3827 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
3828 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
3829 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
3830 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
3832 tree t
= TREE_OPERAND (exp
, 1);
3834 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
3835 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
3838 /* If the result is to be Pmode and we are adding an integer to
3839 something, we might be forming a constant. So try to use
3840 plus_constant. If it produces a sum and we can't accept it,
3841 use force_operand. This allows P = &ARR[const] to generate
3842 efficient code on machines where a SYMBOL_REF is not a valid
3845 If this is an EXPAND_SUM call, always return the sum. */
3846 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
3847 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3848 && (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
3851 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
3853 op1
= plus_constant (op1
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)));
3854 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
3855 op1
= force_operand (op1
, target
);
3859 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
3860 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
3861 && (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
3864 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
3866 op0
= plus_constant (op0
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)));
3867 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
3868 op0
= force_operand (op0
, target
);
3872 /* No sense saving up arithmetic to be done
3873 if it's all in the wrong mode to form part of an address.
3874 And force_operand won't know whether to sign-extend or
3876 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
3877 || mode
!= Pmode
) goto binop
;
3879 preexpand_calls (exp
);
3880 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
3883 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
3884 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
3886 /* Make sure any term that's a sum with a constant comes last. */
3887 if (GET_CODE (op0
) == PLUS
3888 && CONSTANT_P (XEXP (op0
, 1)))
3894 /* If adding to a sum including a constant,
3895 associate it to put the constant outside. */
3896 if (GET_CODE (op1
) == PLUS
3897 && CONSTANT_P (XEXP (op1
, 1)))
3899 rtx constant_term
= const0_rtx
;
3901 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
3904 /* Ensure that MULT comes first if there is one. */
3905 else if (GET_CODE (op0
) == MULT
)
3906 op0
= gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
3908 op0
= gen_rtx (PLUS
, mode
, XEXP (op1
, 0), op0
);
3910 /* Let's also eliminate constants from op0 if possible. */
3911 op0
= eliminate_constant_term (op0
, &constant_term
);
3913 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3914 their sum should be a constant. Form it into OP1, since the
3915 result we want will then be OP0 + OP1. */
3917 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
3922 op1
= gen_rtx (PLUS
, mode
, constant_term
, XEXP (op1
, 1));
3925 /* Put a constant term last and put a multiplication first. */
3926 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
3927 temp
= op1
, op1
= op0
, op0
= temp
;
3929 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
3930 return temp
? temp
: gen_rtx (PLUS
, mode
, op0
, op1
);
3933 /* Handle difference of two symbolic constants,
3934 for the sake of an initializer. */
3935 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
3936 && really_constant_p (TREE_OPERAND (exp
, 0))
3937 && really_constant_p (TREE_OPERAND (exp
, 1)))
3939 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
3940 VOIDmode
, modifier
);
3941 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
3942 VOIDmode
, modifier
);
3943 return gen_rtx (MINUS
, mode
, op0
, op1
);
3945 /* Convert A - const to A + (-const). */
3946 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
3948 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0),
3949 fold (build1 (NEGATE_EXPR
, type
,
3950 TREE_OPERAND (exp
, 1))));
3953 this_optab
= sub_optab
;
3957 preexpand_calls (exp
);
3958 /* If first operand is constant, swap them.
3959 Thus the following special case checks need only
3960 check the second operand. */
3961 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
3963 register tree t1
= TREE_OPERAND (exp
, 0);
3964 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
3965 TREE_OPERAND (exp
, 1) = t1
;
3968 /* Attempt to return something suitable for generating an
3969 indexed address, for machines that support that. */
3971 if (modifier
== EXPAND_SUM
&& mode
== Pmode
3972 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
3973 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3975 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, EXPAND_SUM
);
3977 /* Apply distributive law if OP0 is x+c. */
3978 if (GET_CODE (op0
) == PLUS
3979 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
3980 return gen_rtx (PLUS
, mode
,
3981 gen_rtx (MULT
, mode
, XEXP (op0
, 0),
3982 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
3983 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
3984 * INTVAL (XEXP (op0
, 1))));
3986 if (GET_CODE (op0
) != REG
)
3987 op0
= force_operand (op0
, NULL_RTX
);
3988 if (GET_CODE (op0
) != REG
)
3989 op0
= copy_to_mode_reg (mode
, op0
);
3991 return gen_rtx (MULT
, mode
, op0
,
3992 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
3995 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
3998 /* Check for multiplying things that have been extended
3999 from a narrower type. If this machine supports multiplying
4000 in that narrower type with a result in the desired type,
4001 do it that way, and avoid the explicit type-conversion. */
4002 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
4003 && TREE_CODE (type
) == INTEGER_TYPE
4004 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
4005 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4006 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
4007 && int_fits_type_p (TREE_OPERAND (exp
, 1),
4008 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
4009 /* Don't use a widening multiply if a shift will do. */
4010 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
4011 > HOST_BITS_PER_WIDE_INT
)
4012 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
4014 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
4015 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
4017 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
4018 /* If both operands are extended, they must either both
4019 be zero-extended or both be sign-extended. */
4020 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
4022 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
4024 enum machine_mode innermode
4025 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
4026 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
4027 ? umul_widen_optab
: smul_widen_optab
);
4028 if (mode
== GET_MODE_WIDER_MODE (innermode
)
4029 && this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
4031 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
4032 NULL_RTX
, VOIDmode
, 0);
4033 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
4034 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
4037 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
4038 NULL_RTX
, VOIDmode
, 0);
4042 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4043 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
4044 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
4046 case TRUNC_DIV_EXPR
:
4047 case FLOOR_DIV_EXPR
:
4049 case ROUND_DIV_EXPR
:
4050 case EXACT_DIV_EXPR
:
4051 preexpand_calls (exp
);
4052 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
4054 /* Possible optimization: compute the dividend with EXPAND_SUM
4055 then if the divisor is constant can optimize the case
4056 where some terms of the dividend have coeffs divisible by it. */
4057 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4058 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
4059 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
4062 this_optab
= flodiv_optab
;
4065 case TRUNC_MOD_EXPR
:
4066 case FLOOR_MOD_EXPR
:
4068 case ROUND_MOD_EXPR
:
4069 preexpand_calls (exp
);
4070 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
4072 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4073 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
4074 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
4076 case FIX_ROUND_EXPR
:
4077 case FIX_FLOOR_EXPR
:
4079 abort (); /* Not used for C. */
4081 case FIX_TRUNC_EXPR
:
4082 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
4084 target
= gen_reg_rtx (mode
);
4085 expand_fix (target
, op0
, unsignedp
);
4089 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
4091 target
= gen_reg_rtx (mode
);
4092 /* expand_float can't figure out what to do if FROM has VOIDmode.
4093 So give it the correct mode. With -O, cse will optimize this. */
4094 if (GET_MODE (op0
) == VOIDmode
)
4095 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
4097 expand_float (target
, op0
,
4098 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4102 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
4103 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
4109 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4111 /* Unsigned abs is simply the operand. Testing here means we don't
4112 risk generating incorrect code below. */
4113 if (TREE_UNSIGNED (type
))
4116 /* First try to do it with a special abs instruction. */
4117 temp
= expand_unop (mode
, abs_optab
, op0
, target
, 0);
4121 /* If this machine has expensive jumps, we can do integer absolute
4122 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4123 where W is the width of MODE. */
4125 if (GET_MODE_CLASS (mode
) == MODE_INT
&& BRANCH_COST
>= 2)
4127 rtx extended
= expand_shift (RSHIFT_EXPR
, mode
, op0
,
4128 size_int (GET_MODE_BITSIZE (mode
) - 1),
4131 temp
= expand_binop (mode
, xor_optab
, extended
, op0
, target
, 0,
4134 temp
= expand_binop (mode
, sub_optab
, temp
, extended
, target
, 0,
4141 /* If that does not win, use conditional jump and negate. */
4142 target
= original_target
;
4143 temp
= gen_label_rtx ();
4144 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 0))
4145 || (GET_CODE (target
) == REG
4146 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4147 target
= gen_reg_rtx (mode
);
4148 emit_move_insn (target
, op0
);
4149 emit_cmp_insn (target
,
4150 expand_expr (convert (type
, integer_zero_node
),
4151 NULL_RTX
, VOIDmode
, 0),
4152 GE
, NULL_RTX
, mode
, 0, 0);
4154 emit_jump_insn (gen_bge (temp
));
4155 op0
= expand_unop (mode
, neg_optab
, target
, target
, 0);
4157 emit_move_insn (target
, op0
);
4164 target
= original_target
;
4165 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1))
4166 || (GET_CODE (target
) == REG
4167 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4168 target
= gen_reg_rtx (mode
);
4169 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
4170 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
4172 /* First try to do it with a special MIN or MAX instruction.
4173 If that does not win, use a conditional jump to select the proper
4175 this_optab
= (TREE_UNSIGNED (type
)
4176 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
4177 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
4179 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
4185 emit_move_insn (target
, op0
);
4186 op0
= gen_label_rtx ();
4187 if (code
== MAX_EXPR
)
4188 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
4189 ? compare_from_rtx (target
, op1
, GEU
, 1, mode
, NULL_RTX
, 0)
4190 : compare_from_rtx (target
, op1
, GE
, 0, mode
, NULL_RTX
, 0));
4192 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
4193 ? compare_from_rtx (target
, op1
, LEU
, 1, mode
, NULL_RTX
, 0)
4194 : compare_from_rtx (target
, op1
, LE
, 0, mode
, NULL_RTX
, 0));
4195 if (temp
== const0_rtx
)
4196 emit_move_insn (target
, op1
);
4197 else if (temp
!= const_true_rtx
)
4199 if (bcc_gen_fctn
[(int) GET_CODE (temp
)] != 0)
4200 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (temp
)]) (op0
));
4203 emit_move_insn (target
, op1
);
4208 /* ??? Can optimize when the operand of this is a bitwise operation,
4209 by using a different bitwise operation. */
4211 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4212 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
4218 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4219 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
4224 /* ??? Can optimize bitwise operations with one arg constant.
4225 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4226 and (a bitwise1 b) bitwise2 b (etc)
4227 but that is probably not worth while. */
4229 /* BIT_AND_EXPR is for bitwise anding.
4230 TRUTH_AND_EXPR is for anding two boolean values
4231 when we want in all cases to compute both of them.
4232 In general it is fastest to do TRUTH_AND_EXPR by
4233 computing both operands as actual zero-or-1 values
4234 and then bitwise anding. In cases where there cannot
4235 be any side effects, better code would be made by
4236 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4237 but the question is how to recognize those cases. */
4239 case TRUTH_AND_EXPR
:
4241 this_optab
= and_optab
;
4244 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4247 this_optab
= ior_optab
;
4251 this_optab
= xor_optab
;
4258 preexpand_calls (exp
);
4259 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
4261 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4262 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
4265 /* Could determine the answer when only additive constants differ.
4266 Also, the addition of one can be handled by changing the condition. */
4273 preexpand_calls (exp
);
4274 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
4277 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4278 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
4280 && GET_CODE (original_target
) == REG
4281 && (GET_MODE (original_target
)
4282 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
4284 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, VOIDmode
, 0);
4285 if (temp
!= original_target
)
4286 temp
= copy_to_reg (temp
);
4287 op1
= gen_label_rtx ();
4288 emit_cmp_insn (temp
, const0_rtx
, EQ
, NULL_RTX
,
4289 GET_MODE (temp
), unsignedp
, 0);
4290 emit_jump_insn (gen_beq (op1
));
4291 emit_move_insn (temp
, const1_rtx
);
4295 /* If no set-flag instruction, must generate a conditional
4296 store into a temporary variable. Drop through
4297 and handle this like && and ||. */
4299 case TRUTH_ANDIF_EXPR
:
4300 case TRUTH_ORIF_EXPR
:
4301 if (target
== 0 || ! safe_from_p (target
, exp
)
4302 /* Make sure we don't have a hard reg (such as function's return
4303 value) live across basic blocks, if not optimizing. */
4304 || (!optimize
&& GET_CODE (target
) == REG
4305 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4306 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
4307 emit_clr_insn (target
);
4308 op1
= gen_label_rtx ();
4309 jumpifnot (exp
, op1
);
4310 emit_0_to_1_insn (target
);
4314 case TRUTH_NOT_EXPR
:
4315 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
4316 /* The parser is careful to generate TRUTH_NOT_EXPR
4317 only with operands that are always zero or one. */
4318 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
4319 target
, 1, OPTAB_LIB_WIDEN
);
4325 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4327 return expand_expr (TREE_OPERAND (exp
, 1),
4328 (ignore
? const0_rtx
: target
),
4333 /* Note that COND_EXPRs whose type is a structure or union
4334 are required to be constructed to contain assignments of
4335 a temporary variable, so that we can evaluate them here
4336 for side effect only. If type is void, we must do likewise. */
4338 /* If an arm of the branch requires a cleanup,
4339 only that cleanup is performed. */
4342 tree binary_op
= 0, unary_op
= 0;
4343 tree old_cleanups
= cleanups_this_call
;
4344 cleanups_this_call
= 0;
4346 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4347 convert it to our mode, if necessary. */
4348 if (integer_onep (TREE_OPERAND (exp
, 1))
4349 && integer_zerop (TREE_OPERAND (exp
, 2))
4350 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
4352 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
4353 if (GET_MODE (op0
) == mode
)
4356 target
= gen_reg_rtx (mode
);
4357 convert_move (target
, op0
, unsignedp
);
4361 /* If we are not to produce a result, we have no target. Otherwise,
4362 if a target was specified use it; it will not be used as an
4363 intermediate target unless it is safe. If no target, use a
4366 if (mode
== VOIDmode
|| ignore
)
4368 else if (original_target
4369 && safe_from_p (original_target
, TREE_OPERAND (exp
, 0)))
4370 temp
= original_target
;
4371 else if (mode
== BLKmode
)
4373 if (TYPE_SIZE (type
) == 0
4374 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4376 temp
= assign_stack_temp (BLKmode
,
4377 (TREE_INT_CST_LOW (TYPE_SIZE (type
))
4378 + BITS_PER_UNIT
- 1)
4379 / BITS_PER_UNIT
, 0);
4382 temp
= gen_reg_rtx (mode
);
4384 /* Check for X ? A + B : A. If we have this, we can copy
4385 A to the output and conditionally add B. Similarly for unary
4386 operations. Don't do this if X has side-effects because
4387 those side effects might affect A or B and the "?" operation is
4388 a sequence point in ANSI. (We test for side effects later.) */
4390 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
4391 && operand_equal_p (TREE_OPERAND (exp
, 2),
4392 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
4393 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
4394 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
4395 && operand_equal_p (TREE_OPERAND (exp
, 1),
4396 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
4397 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
4398 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
4399 && operand_equal_p (TREE_OPERAND (exp
, 2),
4400 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
4401 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
4402 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
4403 && operand_equal_p (TREE_OPERAND (exp
, 1),
4404 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
4405 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
4407 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4408 operation, do this as A + (X != 0). Similarly for other simple
4409 binary operators. */
4410 if (singleton
&& binary_op
4411 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
4412 && (TREE_CODE (binary_op
) == PLUS_EXPR
4413 || TREE_CODE (binary_op
) == MINUS_EXPR
4414 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
4415 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
4416 || TREE_CODE (binary_op
) == BIT_AND_EXPR
)
4417 && integer_onep (TREE_OPERAND (binary_op
, 1))
4418 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
4421 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
4422 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
4423 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
4424 : TREE_CODE (binary_op
) == BIT_XOR_EXPR
? xor_optab
4427 /* If we had X ? A : A + 1, do this as A + (X == 0).
4429 We have to invert the truth value here and then put it
4430 back later if do_store_flag fails. We cannot simply copy
4431 TREE_OPERAND (exp, 0) to another variable and modify that
4432 because invert_truthvalue can modify the tree pointed to
4434 if (singleton
== TREE_OPERAND (exp
, 1))
4435 TREE_OPERAND (exp
, 0)
4436 = invert_truthvalue (TREE_OPERAND (exp
, 0));
4438 result
= do_store_flag (TREE_OPERAND (exp
, 0),
4439 (safe_from_p (temp
, singleton
)
4441 mode
, BRANCH_COST
<= 1);
4445 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
4446 return expand_binop (mode
, boptab
, op1
, result
, temp
,
4447 unsignedp
, OPTAB_LIB_WIDEN
);
4449 else if (singleton
== TREE_OPERAND (exp
, 1))
4450 TREE_OPERAND (exp
, 0)
4451 = invert_truthvalue (TREE_OPERAND (exp
, 0));
4455 op0
= gen_label_rtx ();
4457 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
4461 /* If the target conflicts with the other operand of the
4462 binary op, we can't use it. Also, we can't use the target
4463 if it is a hard register, because evaluating the condition
4464 might clobber it. */
4466 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1)))
4467 || (GET_CODE (temp
) == REG
4468 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
4469 temp
= gen_reg_rtx (mode
);
4470 store_expr (singleton
, temp
, 0);
4473 expand_expr (singleton
,
4474 ignore
? const1_rtx
: NULL_RTX
, VOIDmode
, 0);
4475 if (cleanups_this_call
)
4477 sorry ("aggregate value in COND_EXPR");
4478 cleanups_this_call
= 0;
4480 if (singleton
== TREE_OPERAND (exp
, 1))
4481 jumpif (TREE_OPERAND (exp
, 0), op0
);
4483 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
4485 if (binary_op
&& temp
== 0)
4486 /* Just touch the other operand. */
4487 expand_expr (TREE_OPERAND (binary_op
, 1),
4488 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
4490 store_expr (build (TREE_CODE (binary_op
), type
,
4491 make_tree (type
, temp
),
4492 TREE_OPERAND (binary_op
, 1)),
4495 store_expr (build1 (TREE_CODE (unary_op
), type
,
4496 make_tree (type
, temp
)),
4501 /* This is now done in jump.c and is better done there because it
4502 produces shorter register lifetimes. */
4504 /* Check for both possibilities either constants or variables
4505 in registers (but not the same as the target!). If so, can
4506 save branches by assigning one, branching, and assigning the
4508 else if (temp
&& GET_MODE (temp
) != BLKmode
4509 && (TREE_CONSTANT (TREE_OPERAND (exp
, 1))
4510 || ((TREE_CODE (TREE_OPERAND (exp
, 1)) == PARM_DECL
4511 || TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
)
4512 && DECL_RTL (TREE_OPERAND (exp
, 1))
4513 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 1))) == REG
4514 && DECL_RTL (TREE_OPERAND (exp
, 1)) != temp
))
4515 && (TREE_CONSTANT (TREE_OPERAND (exp
, 2))
4516 || ((TREE_CODE (TREE_OPERAND (exp
, 2)) == PARM_DECL
4517 || TREE_CODE (TREE_OPERAND (exp
, 2)) == VAR_DECL
)
4518 && DECL_RTL (TREE_OPERAND (exp
, 2))
4519 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 2))) == REG
4520 && DECL_RTL (TREE_OPERAND (exp
, 2)) != temp
)))
4522 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
4523 temp
= gen_reg_rtx (mode
);
4524 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
4525 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
4526 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
4530 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4531 comparison operator. If we have one of these cases, set the
4532 output to A, branch on A (cse will merge these two references),
4533 then set the output to FOO. */
4535 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
4536 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
4537 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
4538 TREE_OPERAND (exp
, 1), 0)
4539 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
4540 && safe_from_p (temp
, TREE_OPERAND (exp
, 2)))
4542 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
4543 temp
= gen_reg_rtx (mode
);
4544 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
4545 jumpif (TREE_OPERAND (exp
, 0), op0
);
4546 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
4550 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
4551 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
4552 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
4553 TREE_OPERAND (exp
, 2), 0)
4554 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
4555 && safe_from_p (temp
, TREE_OPERAND (exp
, 1)))
4557 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
4558 temp
= gen_reg_rtx (mode
);
4559 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
4560 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
4561 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
4566 op1
= gen_label_rtx ();
4567 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
4569 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
4571 expand_expr (TREE_OPERAND (exp
, 1),
4572 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
4573 if (cleanups_this_call
)
4575 sorry ("aggregate value in COND_EXPR");
4576 cleanups_this_call
= 0;
4580 emit_jump_insn (gen_jump (op1
));
4584 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
4586 expand_expr (TREE_OPERAND (exp
, 2),
4587 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
4590 if (cleanups_this_call
)
4592 sorry ("aggregate value in COND_EXPR");
4593 cleanups_this_call
= 0;
4599 cleanups_this_call
= old_cleanups
;
4605 /* Something needs to be initialized, but we didn't know
4606 where that thing was when building the tree. For example,
4607 it could be the return value of a function, or a parameter
4608 to a function which lays down in the stack, or a temporary
4609 variable which must be passed by reference.
4611 We guarantee that the expression will either be constructed
4612 or copied into our original target. */
4614 tree slot
= TREE_OPERAND (exp
, 0);
4616 if (TREE_CODE (slot
) != VAR_DECL
)
4621 if (DECL_RTL (slot
) != 0)
4622 target
= DECL_RTL (slot
);
4625 target
= assign_stack_temp (mode
, int_size_in_bytes (type
), 0);
4626 /* All temp slots at this level must not conflict. */
4627 preserve_temp_slots (target
);
4628 DECL_RTL (slot
) = target
;
4632 /* Since SLOT is not known to the called function
4633 to belong to its stack frame, we must build an explicit
4634 cleanup. This case occurs when we must build up a reference
4635 to pass the reference as an argument. In this case,
4636 it is very likely that such a reference need not be
4639 if (TREE_OPERAND (exp
, 2) == 0)
4640 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
4641 if (TREE_OPERAND (exp
, 2))
4642 cleanups_this_call
= tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2),
4643 cleanups_this_call
);
4648 /* This case does occur, when expanding a parameter which
4649 needs to be constructed on the stack. The target
4650 is the actual stack address that we want to initialize.
4651 The function we call will perform the cleanup in this case. */
4653 DECL_RTL (slot
) = target
;
4656 return expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
4661 tree lhs
= TREE_OPERAND (exp
, 0);
4662 tree rhs
= TREE_OPERAND (exp
, 1);
4663 tree noncopied_parts
= 0;
4664 tree lhs_type
= TREE_TYPE (lhs
);
4666 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
4667 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
4668 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
4669 TYPE_NONCOPIED_PARTS (lhs_type
));
4670 while (noncopied_parts
!= 0)
4672 expand_assignment (TREE_VALUE (noncopied_parts
),
4673 TREE_PURPOSE (noncopied_parts
), 0, 0);
4674 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
4681 /* If lhs is complex, expand calls in rhs before computing it.
4682 That's so we don't compute a pointer and save it over a call.
4683 If lhs is simple, compute it first so we can give it as a
4684 target if the rhs is just a call. This avoids an extra temp and copy
4685 and that prevents a partial-subsumption which makes bad code.
4686 Actually we could treat component_ref's of vars like vars. */
4688 tree lhs
= TREE_OPERAND (exp
, 0);
4689 tree rhs
= TREE_OPERAND (exp
, 1);
4690 tree noncopied_parts
= 0;
4691 tree lhs_type
= TREE_TYPE (lhs
);
4695 if (TREE_CODE (lhs
) != VAR_DECL
4696 && TREE_CODE (lhs
) != RESULT_DECL
4697 && TREE_CODE (lhs
) != PARM_DECL
)
4698 preexpand_calls (exp
);
4700 /* Check for |= or &= of a bitfield of size one into another bitfield
4701 of size 1. In this case, (unless we need the result of the
4702 assignment) we can do this more efficiently with a
4703 test followed by an assignment, if necessary.
4705 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4706 things change so we do, this code should be enhanced to
4709 && TREE_CODE (lhs
) == COMPONENT_REF
4710 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
4711 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
4712 && TREE_OPERAND (rhs
, 0) == lhs
4713 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
4714 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
4715 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
4717 rtx label
= gen_label_rtx ();
4719 do_jump (TREE_OPERAND (rhs
, 1),
4720 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
4721 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
4722 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
4723 (TREE_CODE (rhs
) == BIT_IOR_EXPR
4725 : integer_zero_node
)),
4727 do_pending_stack_adjust ();
4732 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
4733 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
4734 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
4735 TYPE_NONCOPIED_PARTS (lhs_type
));
4737 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
4738 while (noncopied_parts
!= 0)
4740 expand_assignment (TREE_PURPOSE (noncopied_parts
),
4741 TREE_VALUE (noncopied_parts
), 0, 0);
4742 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
4747 case PREINCREMENT_EXPR
:
4748 case PREDECREMENT_EXPR
:
4749 return expand_increment (exp
, 0);
4751 case POSTINCREMENT_EXPR
:
4752 case POSTDECREMENT_EXPR
:
4753 /* Faster to treat as pre-increment if result is not used. */
4754 return expand_increment (exp
, ! ignore
);
4757 /* Are we taking the address of a nested function? */
4758 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
4759 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0)
4761 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
4762 op0
= force_operand (op0
, target
);
4766 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
4767 (modifier
== EXPAND_INITIALIZER
4768 ? modifier
: EXPAND_CONST_ADDRESS
));
4769 if (GET_CODE (op0
) != MEM
)
4772 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
4773 return XEXP (op0
, 0);
4774 op0
= force_operand (XEXP (op0
, 0), target
);
4776 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
4777 return force_reg (Pmode
, op0
);
4780 case ENTRY_VALUE_EXPR
:
4787 return (*lang_expand_expr
) (exp
, target
, tmode
, modifier
);
4790 /* Here to do an ordinary binary operator, generating an instruction
4791 from the optab already placed in `this_optab'. */
4793 preexpand_calls (exp
);
4794 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
4796 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
4797 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
4799 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
4800 unsignedp
, OPTAB_LIB_WIDEN
);
4806 /* Return the alignment in bits of EXP, a pointer valued expression.
4807 But don't return more than MAX_ALIGN no matter what.
4808 The alignment returned is, by default, the alignment of the thing that
4809 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4811 Otherwise, look at the expression to see if we can do better, i.e., if the
4812 expression is actually pointing at an object whose alignment is tighter. */
4815 get_pointer_alignment (exp
, max_align
)
4819 unsigned align
, inner
;
4821 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
4824 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
4825 align
= MIN (align
, max_align
);
4829 switch (TREE_CODE (exp
))
4833 case NON_LVALUE_EXPR
:
4834 exp
= TREE_OPERAND (exp
, 0);
4835 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
4837 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
4838 inner
= MIN (inner
, max_align
);
4839 align
= MAX (align
, inner
);
4843 /* If sum of pointer + int, restrict our maximum alignment to that
4844 imposed by the integer. If not, we can't do any better than
4846 if (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
)
4849 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
)
4854 exp
= TREE_OPERAND (exp
, 0);
4858 /* See what we are pointing at and look at its alignment. */
4859 exp
= TREE_OPERAND (exp
, 0);
4860 if (TREE_CODE (exp
) == FUNCTION_DECL
)
4861 align
= MAX (align
, FUNCTION_BOUNDARY
);
4862 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
4863 align
= MAX (align
, DECL_ALIGN (exp
));
4864 #ifdef CONSTANT_ALIGNMENT
4865 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c')
4866 align
= CONSTANT_ALIGNMENT (exp
, align
);
4868 return MIN (align
, max_align
);
4876 /* Return the tree node and offset if a given argument corresponds to
4877 a string constant. */
4880 string_constant (arg
, ptr_offset
)
4886 if (TREE_CODE (arg
) == ADDR_EXPR
4887 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
4889 *ptr_offset
= integer_zero_node
;
4890 return TREE_OPERAND (arg
, 0);
4892 else if (TREE_CODE (arg
) == PLUS_EXPR
)
4894 tree arg0
= TREE_OPERAND (arg
, 0);
4895 tree arg1
= TREE_OPERAND (arg
, 1);
4900 if (TREE_CODE (arg0
) == ADDR_EXPR
4901 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
4904 return TREE_OPERAND (arg0
, 0);
4906 else if (TREE_CODE (arg1
) == ADDR_EXPR
4907 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
4910 return TREE_OPERAND (arg1
, 0);
4917 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4918 way, because it could contain a zero byte in the middle.
4919 TREE_STRING_LENGTH is the size of the character array, not the string.
4921 Unfortunately, string_constant can't access the values of const char
4922 arrays with initializers, so neither can we do so here. */
4932 src
= string_constant (src
, &offset_node
);
4935 max
= TREE_STRING_LENGTH (src
);
4936 ptr
= TREE_STRING_POINTER (src
);
4937 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
4939 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4940 compute the offset to the following null if we don't know where to
4941 start searching for it. */
4943 for (i
= 0; i
< max
; i
++)
4946 /* We don't know the starting offset, but we do know that the string
4947 has no internal zero bytes. We can assume that the offset falls
4948 within the bounds of the string; otherwise, the programmer deserves
4949 what he gets. Subtract the offset from the length of the string,
4951 /* This would perhaps not be valid if we were dealing with named
4952 arrays in addition to literal string constants. */
4953 return size_binop (MINUS_EXPR
, size_int (max
), offset_node
);
4956 /* We have a known offset into the string. Start searching there for
4957 a null character. */
4958 if (offset_node
== 0)
4962 /* Did we get a long long offset? If so, punt. */
4963 if (TREE_INT_CST_HIGH (offset_node
) != 0)
4965 offset
= TREE_INT_CST_LOW (offset_node
);
4967 /* If the offset is known to be out of bounds, warn, and call strlen at
4969 if (offset
< 0 || offset
> max
)
4971 warning ("offset outside bounds of constant string");
4974 /* Use strlen to search for the first zero byte. Since any strings
4975 constructed with build_string will have nulls appended, we win even
4976 if we get handed something like (char[4])"abcd".
4978 Since OFFSET is our starting index into the string, no further
4979 calculation is needed. */
4980 return size_int (strlen (ptr
+ offset
));
4983 /* Expand an expression EXP that calls a built-in function,
4984 with result going to TARGET if that's convenient
4985 (and in mode MODE if that's convenient).
4986 SUBTARGET may be used as the target for computing one of EXP's operands.
4987 IGNORE is nonzero if the value is to be ignored. */
4990 expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
4994 enum machine_mode mode
;
4997 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4998 tree arglist
= TREE_OPERAND (exp
, 1);
5001 enum machine_mode value_mode
= TYPE_MODE (TREE_TYPE (exp
));
5003 switch (DECL_FUNCTION_CODE (fndecl
))
5008 /* build_function_call changes these into ABS_EXPR. */
5011 case BUILT_IN_FSQRT
:
5012 /* If not optimizing, call the library function. */
5017 /* Arg could be wrong type if user redeclared this fcn wrong. */
5018 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
)
5019 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp
)));
5021 /* Stabilize and compute the argument. */
5022 if (TREE_CODE (TREE_VALUE (arglist
)) != VAR_DECL
5023 && TREE_CODE (TREE_VALUE (arglist
)) != PARM_DECL
)
5025 exp
= copy_node (exp
);
5026 arglist
= copy_node (arglist
);
5027 TREE_OPERAND (exp
, 1) = arglist
;
5028 TREE_VALUE (arglist
) = save_expr (TREE_VALUE (arglist
));
5030 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
5032 /* Make a suitable register to place result in. */
5033 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5038 /* Compute sqrt into TARGET.
5039 Set TARGET to wherever the result comes back. */
5040 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
5041 sqrt_optab
, op0
, target
, 0);
5043 /* If we were unable to expand via the builtin, stop the
5044 sequence (without outputting the insns) and break, causing
5045 a call the the library function. */
5052 /* Check the results by default. But if flag_fast_math is turned on,
5053 then assume sqrt will always be called with valid arguments. */
5055 if (! flag_fast_math
)
5057 /* Don't define the sqrt instructions
5058 if your machine is not IEEE. */
5059 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
5062 lab1
= gen_label_rtx ();
5064 /* Test the result; if it is NaN, set errno=EDOM because
5065 the argument was not in the domain. */
5066 emit_cmp_insn (target
, target
, EQ
, 0, GET_MODE (target
), 0, 0);
5067 emit_jump_insn (gen_beq (lab1
));
5071 #ifdef GEN_ERRNO_RTX
5072 rtx errno_rtx
= GEN_ERRNO_RTX
;
5075 = gen_rtx (MEM
, word_mode
, gen_rtx (SYMBOL_REF
, Pmode
, "*errno"));
5078 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
5081 /* We can't set errno=EDOM directly; let the library call do it.
5082 Pop the arguments right away in case the call gets deleted. */
5084 expand_call (exp
, target
, 0);
5091 /* Output the entire sequence. */
5092 insns
= get_insns ();
5098 case BUILT_IN_SAVEREGS
:
5099 /* Don't do __builtin_saveregs more than once in a function.
5100 Save the result of the first call and reuse it. */
5101 if (saveregs_value
!= 0)
5102 return saveregs_value
;
5104 /* When this function is called, it means that registers must be
5105 saved on entry to this function. So we migrate the
5106 call to the first insn of this function. */
5109 rtx valreg
, saved_valreg
;
5111 /* Now really call the function. `expand_call' does not call
5112 expand_builtin, so there is no danger of infinite recursion here. */
5115 #ifdef EXPAND_BUILTIN_SAVEREGS
5116 /* Do whatever the machine needs done in this case. */
5117 temp
= EXPAND_BUILTIN_SAVEREGS (arglist
);
5119 /* The register where the function returns its value
5120 is likely to have something else in it, such as an argument.
5121 So preserve that register around the call. */
5122 if (value_mode
!= VOIDmode
)
5124 valreg
= hard_libcall_value (value_mode
);
5125 saved_valreg
= gen_reg_rtx (value_mode
);
5126 emit_move_insn (saved_valreg
, valreg
);
5129 /* Generate the call, putting the value in a pseudo. */
5130 temp
= expand_call (exp
, target
, ignore
);
5132 if (value_mode
!= VOIDmode
)
5133 emit_move_insn (valreg
, saved_valreg
);
5139 saveregs_value
= temp
;
5141 /* This won't work inside a SEQUENCE--it really has to be
5142 at the start of the function. */
5143 if (in_sequence_p ())
5145 /* Better to do this than to crash. */
5146 error ("`va_start' used within `({...})'");
5150 /* Put the sequence after the NOTE that starts the function. */
5151 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
5155 /* __builtin_args_info (N) returns word N of the arg space info
5156 for the current function. The number and meanings of words
5157 is controlled by the definition of CUMULATIVE_ARGS. */
5158 case BUILT_IN_ARGS_INFO
:
5160 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
5162 int *word_ptr
= (int *) ¤t_function_args_info
;
5163 tree type
, elts
, result
;
5165 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
5166 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5167 __FILE__
, __LINE__
);
5171 tree arg
= TREE_VALUE (arglist
);
5172 if (TREE_CODE (arg
) != INTEGER_CST
)
5173 error ("argument of __builtin_args_info must be constant");
5176 int wordnum
= TREE_INT_CST_LOW (arg
);
5178 if (wordnum
< 0 || wordnum
>= nwords
)
5179 error ("argument of __builtin_args_info out of range");
5181 return GEN_INT (word_ptr
[wordnum
]);
5185 error ("missing argument in __builtin_args_info");
5190 for (i
= 0; i
< nwords
; i
++)
5191 elts
= tree_cons (NULL_TREE
, build_int_2 (word_ptr
[i
], 0));
5193 type
= build_array_type (integer_type_node
,
5194 build_index_type (build_int_2 (nwords
, 0)));
5195 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, nreverse (elts
));
5196 TREE_CONSTANT (result
) = 1;
5197 TREE_STATIC (result
) = 1;
5198 result
= build (INDIRECT_REF
, build_pointer_type (type
), result
);
5199 TREE_CONSTANT (result
) = 1;
5200 return expand_expr (result
, NULL_RTX
, VOIDmode
, 0);
5204 /* Return the address of the first anonymous stack arg. */
5205 case BUILT_IN_NEXT_ARG
:
5207 tree fntype
= TREE_TYPE (current_function_decl
);
5208 if (!(TYPE_ARG_TYPES (fntype
) != 0
5209 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
5210 != void_type_node
)))
5212 error ("`va_start' used in function with fixed args");
5217 return expand_binop (Pmode
, add_optab
,
5218 current_function_internal_arg_pointer
,
5219 current_function_arg_offset_rtx
,
5220 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
5222 case BUILT_IN_CLASSIFY_TYPE
:
5225 tree type
= TREE_TYPE (TREE_VALUE (arglist
));
5226 enum tree_code code
= TREE_CODE (type
);
5227 if (code
== VOID_TYPE
)
5228 return GEN_INT (void_type_class
);
5229 if (code
== INTEGER_TYPE
)
5230 return GEN_INT (integer_type_class
);
5231 if (code
== CHAR_TYPE
)
5232 return GEN_INT (char_type_class
);
5233 if (code
== ENUMERAL_TYPE
)
5234 return GEN_INT (enumeral_type_class
);
5235 if (code
== BOOLEAN_TYPE
)
5236 return GEN_INT (boolean_type_class
);
5237 if (code
== POINTER_TYPE
)
5238 return GEN_INT (pointer_type_class
);
5239 if (code
== REFERENCE_TYPE
)
5240 return GEN_INT (reference_type_class
);
5241 if (code
== OFFSET_TYPE
)
5242 return GEN_INT (offset_type_class
);
5243 if (code
== REAL_TYPE
)
5244 return GEN_INT (real_type_class
);
5245 if (code
== COMPLEX_TYPE
)
5246 return GEN_INT (complex_type_class
);
5247 if (code
== FUNCTION_TYPE
)
5248 return GEN_INT (function_type_class
);
5249 if (code
== METHOD_TYPE
)
5250 return GEN_INT (method_type_class
);
5251 if (code
== RECORD_TYPE
)
5252 return GEN_INT (record_type_class
);
5253 if (code
== UNION_TYPE
)
5254 return GEN_INT (union_type_class
);
5255 if (code
== ARRAY_TYPE
)
5256 return GEN_INT (array_type_class
);
5257 if (code
== STRING_TYPE
)
5258 return GEN_INT (string_type_class
);
5259 if (code
== SET_TYPE
)
5260 return GEN_INT (set_type_class
);
5261 if (code
== FILE_TYPE
)
5262 return GEN_INT (file_type_class
);
5263 if (code
== LANG_TYPE
)
5264 return GEN_INT (lang_type_class
);
5266 return GEN_INT (no_type_class
);
5268 case BUILT_IN_CONSTANT_P
:
5272 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist
))) == 'c'
5273 ? const1_rtx
: const0_rtx
);
5275 case BUILT_IN_FRAME_ADDRESS
:
5276 /* The argument must be a nonnegative integer constant.
5277 It counts the number of frames to scan up the stack.
5278 The value is the address of that frame. */
5279 case BUILT_IN_RETURN_ADDRESS
:
5280 /* The argument must be a nonnegative integer constant.
5281 It counts the number of frames to scan up the stack.
5282 The value is the return address saved in that frame. */
5284 /* Warning about missing arg was already issued. */
5286 else if (TREE_CODE (TREE_VALUE (arglist
)) != INTEGER_CST
)
5288 error ("invalid arg to __builtin_return_address");
5291 else if (tree_int_cst_lt (TREE_VALUE (arglist
), integer_zero_node
))
5293 error ("invalid arg to __builtin_return_address");
5298 int count
= TREE_INT_CST_LOW (TREE_VALUE (arglist
));
5299 rtx tem
= frame_pointer_rtx
;
5302 /* Scan back COUNT frames to the specified frame. */
5303 for (i
= 0; i
< count
; i
++)
5305 /* Assume the dynamic chain pointer is in the word that
5306 the frame address points to, unless otherwise specified. */
5307 #ifdef DYNAMIC_CHAIN_ADDRESS
5308 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
5310 tem
= memory_address (Pmode
, tem
);
5311 tem
= copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
5314 /* For __builtin_frame_address, return what we've got. */
5315 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5318 /* For __builtin_return_address,
5319 Get the return address from that frame. */
5320 #ifdef RETURN_ADDR_RTX
5321 return RETURN_ADDR_RTX (count
, tem
);
5323 tem
= memory_address (Pmode
,
5324 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
5325 return copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
5329 case BUILT_IN_ALLOCA
:
5331 /* Arg could be non-integer if user redeclared this fcn wrong. */
5332 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
5334 current_function_calls_alloca
= 1;
5335 /* Compute the argument. */
5336 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
5338 /* Allocate the desired space. */
5339 target
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
5341 /* Record the new stack level for nonlocal gotos. */
5342 if (nonlocal_goto_handler_slot
!= 0)
5343 emit_stack_save (SAVE_NONLOCAL
, &nonlocal_goto_stack_level
, NULL_RTX
);
5347 /* If not optimizing, call the library function. */
5352 /* Arg could be non-integer if user redeclared this fcn wrong. */
5353 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
5356 /* Compute the argument. */
5357 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
5358 /* Compute ffs, into TARGET if possible.
5359 Set TARGET to wherever the result comes back. */
5360 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
5361 ffs_optab
, op0
, target
, 1);
5366 case BUILT_IN_STRLEN
:
5367 /* If not optimizing, call the library function. */
5372 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5373 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
5377 tree src
= TREE_VALUE (arglist
);
5378 tree len
= c_strlen (src
);
5381 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
5383 rtx result
, src_rtx
, char_rtx
;
5384 enum machine_mode insn_mode
= value_mode
, char_mode
;
5385 enum insn_code icode
;
5387 /* If the length is known, just return it. */
5389 return expand_expr (len
, target
, mode
, 0);
5391 /* If SRC is not a pointer type, don't do this operation inline. */
5395 /* Call a function if we can't compute strlen in the right mode. */
5397 while (insn_mode
!= VOIDmode
)
5399 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
5400 if (icode
!= CODE_FOR_nothing
)
5403 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
5405 if (insn_mode
== VOIDmode
)
5408 /* Make a place to write the result of the instruction. */
5411 && GET_CODE (result
) == REG
5412 && GET_MODE (result
) == insn_mode
5413 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
5414 result
= gen_reg_rtx (insn_mode
);
5416 /* Make sure the operands are acceptable to the predicates. */
5418 if (! (*insn_operand_predicate
[(int)icode
][0]) (result
, insn_mode
))
5419 result
= gen_reg_rtx (insn_mode
);
5421 src_rtx
= memory_address (BLKmode
,
5422 expand_expr (src
, NULL_RTX
, Pmode
,
5424 if (! (*insn_operand_predicate
[(int)icode
][1]) (src_rtx
, Pmode
))
5425 src_rtx
= copy_to_mode_reg (Pmode
, src_rtx
);
5427 char_rtx
= const0_rtx
;
5428 char_mode
= insn_operand_mode
[(int)icode
][2];
5429 if (! (*insn_operand_predicate
[(int)icode
][2]) (char_rtx
, char_mode
))
5430 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
5432 emit_insn (GEN_FCN (icode
) (result
,
5433 gen_rtx (MEM
, BLKmode
, src_rtx
),
5434 char_rtx
, GEN_INT (align
)));
5436 /* Return the value in the proper mode for this function. */
5437 if (GET_MODE (result
) == value_mode
)
5439 else if (target
!= 0)
5441 convert_move (target
, result
, 0);
5445 return convert_to_mode (value_mode
, result
, 0);
5448 case BUILT_IN_STRCPY
:
5449 /* If not optimizing, call the library function. */
5454 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5455 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
5456 || TREE_CHAIN (arglist
) == 0
5457 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
5461 tree len
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
5466 len
= size_binop (PLUS_EXPR
, len
, integer_one_node
);
5468 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
5472 case BUILT_IN_MEMCPY
:
5473 /* If not optimizing, call the library function. */
5478 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5479 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
5480 || TREE_CHAIN (arglist
) == 0
5481 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
5482 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
5483 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
5487 tree dest
= TREE_VALUE (arglist
);
5488 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
5489 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5492 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
5494 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
5497 /* If either SRC or DEST is not a pointer type, don't do
5498 this operation in-line. */
5499 if (src_align
== 0 || dest_align
== 0)
5501 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCPY
)
5502 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
5506 dest_rtx
= expand_expr (dest
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5508 /* Copy word part most expediently. */
5509 emit_block_move (gen_rtx (MEM
, BLKmode
,
5510 memory_address (BLKmode
, dest_rtx
)),
5511 gen_rtx (MEM
, BLKmode
,
5512 memory_address (BLKmode
,
5513 expand_expr (src
, NULL_RTX
,
5516 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
5517 MIN (src_align
, dest_align
));
5521 /* These comparison functions need an instruction that returns an actual
5522 index. An ordinary compare that just sets the condition codes
5524 #ifdef HAVE_cmpstrsi
5525 case BUILT_IN_STRCMP
:
5526 /* If not optimizing, call the library function. */
5531 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5532 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
5533 || TREE_CHAIN (arglist
) == 0
5534 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
5536 else if (!HAVE_cmpstrsi
)
5539 tree arg1
= TREE_VALUE (arglist
);
5540 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
5544 len
= c_strlen (arg1
);
5546 len
= size_binop (PLUS_EXPR
, integer_one_node
, len
);
5547 len2
= c_strlen (arg2
);
5549 len2
= size_binop (PLUS_EXPR
, integer_one_node
, len2
);
5551 /* If we don't have a constant length for the first, use the length
5552 of the second, if we know it. We don't require a constant for
5553 this case; some cost analysis could be done if both are available
5554 but neither is constant. For now, assume they're equally cheap.
5556 If both strings have constant lengths, use the smaller. This
5557 could arise if optimization results in strcpy being called with
5558 two fixed strings, or if the code was machine-generated. We should
5559 add some code to the `memcmp' handler below to deal with such
5560 situations, someday. */
5561 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
5568 else if (len2
&& TREE_CODE (len2
) == INTEGER_CST
)
5570 if (tree_int_cst_lt (len2
, len
))
5574 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
5578 case BUILT_IN_MEMCMP
:
5579 /* If not optimizing, call the library function. */
5584 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5585 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
5586 || TREE_CHAIN (arglist
) == 0
5587 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
5588 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
5589 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
5591 else if (!HAVE_cmpstrsi
)
5594 tree arg1
= TREE_VALUE (arglist
);
5595 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
5596 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5600 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
5602 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
5603 enum machine_mode insn_mode
5604 = insn_operand_mode
[(int) CODE_FOR_cmpstrsi
][0];
5606 /* If we don't have POINTER_TYPE, call the function. */
5607 if (arg1_align
== 0 || arg2_align
== 0)
5609 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCMP
)
5610 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
5614 /* Make a place to write the result of the instruction. */
5617 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
5618 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
5619 result
= gen_reg_rtx (insn_mode
);
5621 emit_insn (gen_cmpstrsi (result
,
5622 gen_rtx (MEM
, BLKmode
,
5623 expand_expr (arg1
, NULL_RTX
, Pmode
,
5625 gen_rtx (MEM
, BLKmode
,
5626 expand_expr (arg2
, NULL_RTX
, Pmode
,
5628 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
5629 GEN_INT (MIN (arg1_align
, arg2_align
))));
5631 /* Return the value in the proper mode for this function. */
5632 mode
= TYPE_MODE (TREE_TYPE (exp
));
5633 if (GET_MODE (result
) == mode
)
5635 else if (target
!= 0)
5637 convert_move (target
, result
, 0);
5641 return convert_to_mode (mode
, result
, 0);
5644 case BUILT_IN_STRCMP
:
5645 case BUILT_IN_MEMCMP
:
5649 default: /* just do library call, if unknown builtin */
5650 error ("built-in function %s not currently supported",
5651 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
5654 /* The switch statement above can drop through to cause the function
5655 to be called normally. */
5657 return expand_call (exp
, target
, ignore
);
5660 /* Expand code for a post- or pre- increment or decrement
5661 and return the RTX for the result.
5662 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5665 expand_increment (exp
, post
)
5669 register rtx op0
, op1
;
5670 register rtx temp
, value
;
5671 register tree incremented
= TREE_OPERAND (exp
, 0);
5672 optab this_optab
= add_optab
;
5674 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
5675 int op0_is_copy
= 0;
5677 /* Stabilize any component ref that might need to be
5678 evaluated more than once below. */
5679 if (TREE_CODE (incremented
) == BIT_FIELD_REF
5680 || (TREE_CODE (incremented
) == COMPONENT_REF
5681 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
5682 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
5683 incremented
= stabilize_reference (incremented
);
5685 /* Compute the operands as RTX.
5686 Note whether OP0 is the actual lvalue or a copy of it:
5687 I believe it is a copy iff it is a register or subreg
5688 and insns were generated in computing it. */
5689 temp
= get_last_insn ();
5690 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
5691 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
5692 && temp
!= get_last_insn ());
5693 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5695 /* Decide whether incrementing or decrementing. */
5696 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
5697 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
5698 this_optab
= sub_optab
;
5700 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5701 then we cannot just increment OP0. We must
5702 therefore contrive to increment the original value.
5703 Then we can return OP0 since it is a copy of the old value. */
5706 /* This is the easiest way to increment the value wherever it is.
5707 Problems with multiple evaluation of INCREMENTED
5708 are prevented because either (1) it is a component_ref,
5709 in which case it was stabilized above, or (2) it is an array_ref
5710 with constant index in an array in a register, which is
5711 safe to reevaluate. */
5712 tree newexp
= build ((this_optab
== add_optab
5713 ? PLUS_EXPR
: MINUS_EXPR
),
5716 TREE_OPERAND (exp
, 1));
5717 temp
= expand_assignment (incremented
, newexp
, ! post
, 0);
5718 return post
? op0
: temp
;
5721 /* Convert decrement by a constant into a negative increment. */
5722 if (this_optab
== sub_optab
5723 && GET_CODE (op1
) == CONST_INT
)
5725 op1
= GEN_INT (- INTVAL (op1
));
5726 this_optab
= add_optab
;
5731 /* We have a true reference to the value in OP0.
5732 If there is an insn to add or subtract in this mode, queue it. */
5734 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5735 op0
= stabilize (op0
);
5738 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
5739 if (icode
!= (int) CODE_FOR_nothing
5740 /* Make sure that OP0 is valid for operands 0 and 1
5741 of the insn we want to queue. */
5742 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
5743 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
))
5745 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
5746 op1
= force_reg (mode
, op1
);
5748 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
5752 /* Preincrement, or we can't increment with one simple insn. */
5754 /* Save a copy of the value before inc or dec, to return it later. */
5755 temp
= value
= copy_to_reg (op0
);
5757 /* Arrange to return the incremented value. */
5758 /* Copy the rtx because expand_binop will protect from the queue,
5759 and the results of that would be invalid for us to return
5760 if our caller does emit_queue before using our result. */
5761 temp
= copy_rtx (value
= op0
);
5763 /* Increment however we can. */
5764 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
5765 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
5766 /* Make sure the value is stored into OP0. */
5768 emit_move_insn (op0
, op1
);
5773 /* Expand all function calls contained within EXP, innermost ones first.
5774 But don't look within expressions that have sequence points.
5775 For each CALL_EXPR, record the rtx for its value
5776 in the CALL_EXPR_RTL field. */
5779 preexpand_calls (exp
)
5782 register int nops
, i
;
5783 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
5785 if (! do_preexpand_calls
)
5788 /* Only expressions and references can contain calls. */
5790 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
5793 switch (TREE_CODE (exp
))
5796 /* Do nothing if already expanded. */
5797 if (CALL_EXPR_RTL (exp
) != 0)
5800 /* Do nothing to built-in functions. */
5801 if (TREE_CODE (TREE_OPERAND (exp
, 0)) != ADDR_EXPR
5802 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != FUNCTION_DECL
5803 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5804 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
5809 case TRUTH_ANDIF_EXPR
:
5810 case TRUTH_ORIF_EXPR
:
5811 /* If we find one of these, then we can be sure
5812 the adjust will be done for it (since it makes jumps).
5813 Do it now, so that if this is inside an argument
5814 of a function, we don't get the stack adjustment
5815 after some other args have already been pushed. */
5816 do_pending_stack_adjust ();
5821 case WITH_CLEANUP_EXPR
:
5825 if (SAVE_EXPR_RTL (exp
) != 0)
5829 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
5830 for (i
= 0; i
< nops
; i
++)
5831 if (TREE_OPERAND (exp
, i
) != 0)
5833 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
5834 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
5836 preexpand_calls (TREE_OPERAND (exp
, i
));
5840 /* At the start of a function, record that we have no previously-pushed
5841 arguments waiting to be popped. */
5844 init_pending_stack_adjust ()
5846 pending_stack_adjust
= 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   NOTE(review): return type, braces and the closing #endif were lost in
   extraction and have been reconstructed.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  /* Only safe when the epilogue ignores the stack pointer and this
     function will not be inlined (an inlined copy must pop its args).  */
  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
5863 /* Pop any previously-pushed arguments that have not been popped yet. */
5866 do_pending_stack_adjust ()
5868 if (inhibit_defer_pop
== 0)
5870 if (pending_stack_adjust
!= 0)
5871 adjust_stack (GEN_INT (pending_stack_adjust
));
5872 pending_stack_adjust
= 0;
5876 /* Expand all cleanups up to OLD_CLEANUPS.
5877 Needed here, and also for language-dependent calls. */
5880 expand_cleanups_to (old_cleanups
)
5883 while (cleanups_this_call
!= old_cleanups
)
5885 expand_expr (TREE_VALUE (cleanups_this_call
), NULL_RTX
, VOIDmode
, 0);
5886 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
5890 /* Expand conditional expressions. */
5892 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5893 LABEL is an rtx of code CODE_LABEL, in this function and all the
5897 jumpifnot (exp
, label
)
5901 do_jump (exp
, label
, NULL_RTX
);
5904 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5911 do_jump (exp
, NULL_RTX
, label
);
5914 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5915 the result is zero, or IF_TRUE_LABEL if the result is one.
5916 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5917 meaning fall through in that case.
5919 do_jump always does any pending stack adjust except when it does not
5920 actually perform a jump. An example where there is no jump
5921 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5923 This function is responsible for optimizing cases such as
5924 &&, || and comparison operators in EXP. */
5927 do_jump (exp
, if_false_label
, if_true_label
)
5929 rtx if_false_label
, if_true_label
;
5931 register enum tree_code code
= TREE_CODE (exp
);
5932 /* Some cases need to create a label to jump to
5933 in order to properly fall through.
5934 These cases set DROP_THROUGH_LABEL nonzero. */
5935 rtx drop_through_label
= 0;
5949 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
5955 /* This is not true with #pragma weak */
5957 /* The address of something can never be zero. */
5959 emit_jump (if_true_label
);
5964 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
5965 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
5966 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
5969 /* If we are narrowing the operand, we have to do the compare in the
5971 if ((TYPE_PRECISION (TREE_TYPE (exp
))
5972 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
5974 case NON_LVALUE_EXPR
:
5975 case REFERENCE_EXPR
:
5980 /* These cannot change zero->non-zero or vice versa. */
5981 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
5985 /* This is never less insns than evaluating the PLUS_EXPR followed by
5986 a test and can be longer if the test is eliminated. */
5988 /* Reduce to minus. */
5989 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
5990 TREE_OPERAND (exp
, 0),
5991 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
5992 TREE_OPERAND (exp
, 1))));
5993 /* Process as MINUS. */
5997 /* Non-zero iff operands of minus differ. */
5998 comparison
= compare (build (NE_EXPR
, TREE_TYPE (exp
),
5999 TREE_OPERAND (exp
, 0),
6000 TREE_OPERAND (exp
, 1)),
6005 /* If we are AND'ing with a small constant, do this comparison in the
6006 smallest type that fits. If the machine doesn't have comparisons
6007 that small, it will be converted back to the wider comparison.
6008 This helps if we are testing the sign bit of a narrower object.
6009 combine can't do this for us because it can't know whether a
6010 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6012 if (! SLOW_BYTE_ACCESS
6013 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6014 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
6015 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
6016 && (type
= type_for_size (i
+ 1, 1)) != 0
6017 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
6018 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
6019 != CODE_FOR_nothing
))
6021 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
6026 case TRUTH_NOT_EXPR
:
6027 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
6030 case TRUTH_ANDIF_EXPR
:
6031 if (if_false_label
== 0)
6032 if_false_label
= drop_through_label
= gen_label_rtx ();
6033 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
6034 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
6037 case TRUTH_ORIF_EXPR
:
6038 if (if_true_label
== 0)
6039 if_true_label
= drop_through_label
= gen_label_rtx ();
6040 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
6041 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
6045 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6048 do_pending_stack_adjust ();
6049 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
6056 int bitsize
, bitpos
, unsignedp
;
6057 enum machine_mode mode
;
6062 /* Get description of this reference. We don't actually care
6063 about the underlying object here. */
6064 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6065 &mode
, &unsignedp
, &volatilep
);
6067 type
= type_for_size (bitsize
, unsignedp
);
6068 if (! SLOW_BYTE_ACCESS
6069 && type
!= 0 && bitsize
>= 0
6070 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
6071 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
6072 != CODE_FOR_nothing
))
6074 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
6081 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6082 if (integer_onep (TREE_OPERAND (exp
, 1))
6083 && integer_zerop (TREE_OPERAND (exp
, 2)))
6084 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
6086 else if (integer_zerop (TREE_OPERAND (exp
, 1))
6087 && integer_onep (TREE_OPERAND (exp
, 2)))
6088 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
6092 register rtx label1
= gen_label_rtx ();
6093 drop_through_label
= gen_label_rtx ();
6094 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
6095 /* Now the THEN-expression. */
6096 do_jump (TREE_OPERAND (exp
, 1),
6097 if_false_label
? if_false_label
: drop_through_label
,
6098 if_true_label
? if_true_label
: drop_through_label
);
6099 /* In case the do_jump just above never jumps. */
6100 do_pending_stack_adjust ();
6101 emit_label (label1
);
6102 /* Now the ELSE-expression. */
6103 do_jump (TREE_OPERAND (exp
, 2),
6104 if_false_label
? if_false_label
: drop_through_label
,
6105 if_true_label
? if_true_label
: drop_through_label
);
6110 if (integer_zerop (TREE_OPERAND (exp
, 1)))
6111 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
6112 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6115 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6116 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
6118 comparison
= compare (exp
, EQ
, EQ
);
6122 if (integer_zerop (TREE_OPERAND (exp
, 1)))
6123 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
6124 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6127 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6128 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
6130 comparison
= compare (exp
, NE
, NE
);
6134 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6136 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6137 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
6139 comparison
= compare (exp
, LT
, LTU
);
6143 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6145 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6146 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
6148 comparison
= compare (exp
, LE
, LEU
);
6152 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6154 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6155 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
6157 comparison
= compare (exp
, GT
, GTU
);
6161 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6163 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6164 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
6166 comparison
= compare (exp
, GE
, GEU
);
6171 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
6173 /* This is not needed any more and causes poor code since it causes
6174 comparisons and tests from non-SI objects to have different code
6176 /* Copy to register to avoid generating bad insns by cse
6177 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6178 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
6179 temp
= copy_to_reg (temp
);
6181 do_pending_stack_adjust ();
6182 if (GET_CODE (temp
) == CONST_INT
)
6183 comparison
= (temp
== const0_rtx
? const0_rtx
: const_true_rtx
);
6184 else if (GET_CODE (temp
) == LABEL_REF
)
6185 comparison
= const_true_rtx
;
6186 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
6187 && !can_compare_p (GET_MODE (temp
)))
6188 /* Note swapping the labels gives us not-equal. */
6189 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
6190 else if (GET_MODE (temp
) != VOIDmode
)
6191 comparison
= compare_from_rtx (temp
, CONST0_RTX (GET_MODE (temp
)),
6192 NE
, 1, GET_MODE (temp
), NULL_RTX
, 0);
6197 /* Do any postincrements in the expression that was tested. */
6200 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6201 straight into a conditional jump instruction as the jump condition.
6202 Otherwise, all the work has been done already. */
6204 if (comparison
== const_true_rtx
)
6207 emit_jump (if_true_label
);
6209 else if (comparison
== const0_rtx
)
6212 emit_jump (if_false_label
);
6214 else if (comparison
)
6215 do_jump_for_compare (comparison
, if_false_label
, if_true_label
);
6219 if (drop_through_label
)
6221 /* If do_jump produces code that might be jumped around,
6222 do any stack adjusts from that code, before the place
6223 where control merges in. */
6224 do_pending_stack_adjust ();
6225 emit_label (drop_through_label
);
6229 /* Given a comparison expression EXP for values too wide to be compared
6230 with one insn, test the comparison and jump to the appropriate label.
6231 The code of EXP is ignored; we always test GT if SWAP is 0,
6232 and LT if SWAP is 1. */
6235 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
6238 rtx if_false_label
, if_true_label
;
6240 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
6241 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
6242 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6243 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
6244 rtx drop_through_label
= 0;
6245 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6248 if (! if_true_label
|| ! if_false_label
)
6249 drop_through_label
= gen_label_rtx ();
6250 if (! if_true_label
)
6251 if_true_label
= drop_through_label
;
6252 if (! if_false_label
)
6253 if_false_label
= drop_through_label
;
6255 /* Compare a word at a time, high order first. */
6256 for (i
= 0; i
< nwords
; i
++)
6259 rtx op0_word
, op1_word
;
6261 if (WORDS_BIG_ENDIAN
)
6263 op0_word
= operand_subword_force (op0
, i
, mode
);
6264 op1_word
= operand_subword_force (op1
, i
, mode
);
6268 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
6269 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
6272 /* All but high-order word must be compared as unsigned. */
6273 comp
= compare_from_rtx (op0_word
, op1_word
,
6274 (unsignedp
|| i
> 0) ? GTU
: GT
,
6275 unsignedp
, word_mode
, NULL_RTX
, 0);
6276 if (comp
== const_true_rtx
)
6277 emit_jump (if_true_label
);
6278 else if (comp
!= const0_rtx
)
6279 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
6281 /* Consider lower words only if these are equal. */
6282 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
6284 if (comp
== const_true_rtx
)
6285 emit_jump (if_false_label
);
6286 else if (comp
!= const0_rtx
)
6287 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
6291 emit_jump (if_false_label
);
6292 if (drop_through_label
)
6293 emit_label (drop_through_label
);
6296 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6297 with one insn, test the comparison and jump to the appropriate label. */
6300 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
6302 rtx if_false_label
, if_true_label
;
6304 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6305 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6306 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6307 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
6309 rtx drop_through_label
= 0;
6311 if (! if_false_label
)
6312 drop_through_label
= if_false_label
= gen_label_rtx ();
6314 for (i
= 0; i
< nwords
; i
++)
6316 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
, mode
),
6317 operand_subword_force (op1
, i
, mode
),
6318 EQ
, 0, word_mode
, NULL_RTX
, 0);
6319 if (comp
== const_true_rtx
)
6320 emit_jump (if_false_label
);
6321 else if (comp
!= const0_rtx
)
6322 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
6326 emit_jump (if_true_label
);
6327 if (drop_through_label
)
6328 emit_label (drop_through_label
);
6331 /* Jump according to whether OP0 is 0.
6332 We assume that OP0 has an integer mode that is too wide
6333 for the available compare insns. */
6336 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
6338 rtx if_false_label
, if_true_label
;
6340 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
6342 rtx drop_through_label
= 0;
6344 if (! if_false_label
)
6345 drop_through_label
= if_false_label
= gen_label_rtx ();
6347 for (i
= 0; i
< nwords
; i
++)
6349 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
,
6351 const0_rtx
, EQ
, 0, word_mode
, NULL_RTX
, 0);
6352 if (comp
== const_true_rtx
)
6353 emit_jump (if_false_label
);
6354 else if (comp
!= const0_rtx
)
6355 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
6359 emit_jump (if_true_label
);
6360 if (drop_through_label
)
6361 emit_label (drop_through_label
);
6364 /* Given a comparison expression in rtl form, output conditional branches to
6365 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6368 do_jump_for_compare (comparison
, if_false_label
, if_true_label
)
6369 rtx comparison
, if_false_label
, if_true_label
;
6373 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
6374 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_true_label
));
6379 emit_jump (if_false_label
);
6381 else if (if_false_label
)
6384 rtx prev
= PREV_INSN (get_last_insn ());
6387 /* Output the branch with the opposite condition. Then try to invert
6388 what is generated. If more than one insn is a branch, or if the
6389 branch is not the last insn written, abort. If we can't invert
6390 the branch, emit make a true label, redirect this jump to that,
6391 emit a jump to the false label and define the true label. */
6393 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
6394 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_false_label
));
6398 /* Here we get the insn before what was just emitted.
6399 On some machines, emitting the branch can discard
6400 the previous compare insn and emit a replacement. */
6402 /* If there's only one preceding insn... */
6403 insn
= get_insns ();
6405 insn
= NEXT_INSN (prev
);
6407 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
6408 if (GET_CODE (insn
) == JUMP_INSN
)
6415 if (branch
!= get_last_insn ())
6418 if (! invert_jump (branch
, if_false_label
))
6420 if_true_label
= gen_label_rtx ();
6421 redirect_jump (branch
, if_true_label
);
6422 emit_jump (if_false_label
);
6423 emit_label (if_true_label
);
6428 /* Generate code for a comparison expression EXP
6429 (including code to compute the values to be compared)
6430 and set (CC0) according to the result.
6431 SIGNED_CODE should be the rtx operation for this comparison for
6432 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6434 We force a stack adjustment unless there are currently
6435 things pushed on the stack that aren't yet used. */
6438 compare (exp
, signed_code
, unsigned_code
)
6440 enum rtx_code signed_code
, unsigned_code
;
6443 = expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6445 = expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6446 register tree type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
6447 register enum machine_mode mode
= TYPE_MODE (type
);
6448 int unsignedp
= TREE_UNSIGNED (type
);
6449 enum rtx_code code
= unsignedp
? unsigned_code
: signed_code
;
6451 return compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
,
6453 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
6454 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
6457 /* Like compare but expects the values to compare as two rtx's.
6458 The decision as to signed or unsigned comparison must be made by the caller.
6460 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6463 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6464 size of MODE should be used. */
6467 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
6468 register rtx op0
, op1
;
6471 enum machine_mode mode
;
6475 /* If one operand is constant, make it the second one. */
6477 if (GET_CODE (op0
) == CONST_INT
|| GET_CODE (op0
) == CONST_DOUBLE
)
6482 code
= swap_condition (code
);
6487 op0
= force_not_mem (op0
);
6488 op1
= force_not_mem (op1
);
6491 do_pending_stack_adjust ();
6493 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
)
6494 return simplify_relational_operation (code
, mode
, op0
, op1
);
6496 /* If this is a signed equality comparison, we can do it as an
6497 unsigned comparison since zero-extension is cheaper than sign
6498 extension and comparisons with zero are done as unsigned. This is
6499 the case even on machines that can do fast sign extension, since
6500 zero-extension is easier to combinen with other operations than
6501 sign-extension is. If we are comparing against a constant, we must
6502 convert it to what it would look like unsigned. */
6503 if ((code
== EQ
|| code
== NE
) && ! unsignedp
6504 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
6506 if (GET_CODE (op1
) == CONST_INT
6507 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
6508 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
6512 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
6514 return gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
6517 /* Generate code to calculate EXP using a store-flag instruction
6518 and return an rtx for the result. EXP is either a comparison
6519 or a TRUTH_NOT_EXPR whose operand is a comparison.
6521 If TARGET is nonzero, store the result there if convenient.
6523 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6526 Return zero if there is no suitable set-flag instruction
6527 available on this machine.
6529 Once expand_expr has been called on the arguments of the comparison,
6530 we are committed to doing the store flag, since it is not safe to
6531 re-evaluate the expression. We emit the store-flag insn by calling
6532 emit_store_flag, but only expand the arguments if we have a reason
6533 to believe that emit_store_flag will be successful. If we think that
6534 it will, but it isn't, we have to simulate the store-flag with a
6535 set/jump/set sequence. */
6538 do_store_flag (exp
, target
, mode
, only_cheap
)
6541 enum machine_mode mode
;
6545 tree arg0
, arg1
, type
;
6547 enum machine_mode operand_mode
;
6551 enum insn_code icode
;
6552 rtx subtarget
= target
;
6553 rtx result
, label
, pattern
, jump_pat
;
6555 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6556 result at the end. We can't simply invert the test since it would
6557 have already been inverted if it were valid. This case occurs for
6558 some floating-point comparisons. */
6560 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
6561 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
6563 arg0
= TREE_OPERAND (exp
, 0);
6564 arg1
= TREE_OPERAND (exp
, 1);
6565 type
= TREE_TYPE (arg0
);
6566 operand_mode
= TYPE_MODE (type
);
6567 unsignedp
= TREE_UNSIGNED (type
);
6569 /* We won't bother with BLKmode store-flag operations because it would mean
6570 passing a lot of information to emit_store_flag. */
6571 if (operand_mode
== BLKmode
)
6577 /* Get the rtx comparison code to use. We know that EXP is a comparison
6578 operation of some type. Some comparisons against 1 and -1 can be
6579 converted to comparisons with zero. Do so here so that the tests
6580 below will be aware that we have a comparison with zero. These
6581 tests will not catch constants in the first operand, but constants
6582 are rarely passed as the first operand. */
6584 switch (TREE_CODE (exp
))
6593 if (integer_onep (arg1
))
6594 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
6596 code
= unsignedp
? LTU
: LT
;
6599 if (integer_all_onesp (arg1
))
6600 arg1
= integer_zero_node
, code
= unsignedp
? LTU
: LT
;
6602 code
= unsignedp
? LEU
: LE
;
6605 if (integer_all_onesp (arg1
))
6606 arg1
= integer_zero_node
, code
= unsignedp
? GEU
: GE
;
6608 code
= unsignedp
? GTU
: GT
;
6611 if (integer_onep (arg1
))
6612 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
6614 code
= unsignedp
? GEU
: GE
;
6620 /* Put a constant second. */
6621 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
6623 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
6624 code
= swap_condition (code
);
6627 /* If this is an equality or inequality test of a single bit, we can
6628 do this by shifting the bit being tested to the low-order bit and
6629 masking the result with the constant 1. If the condition was EQ,
6630 we xor it with 1. This does not require an scc insn and is faster
6631 than an scc insn even if we have it. */
6633 if ((code
== NE
|| code
== EQ
)
6634 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6635 && integer_pow2p (TREE_OPERAND (arg0
, 1))
6636 && TYPE_PRECISION (type
) <= HOST_BITS_PER_WIDE_INT
)
6638 int bitnum
= exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0
, 1),
6639 NULL_RTX
, VOIDmode
, 0)));
6641 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
6642 || GET_MODE (subtarget
) != operand_mode
6643 || ! safe_from_p (subtarget
, TREE_OPERAND (arg0
, 0)))
6646 op0
= expand_expr (TREE_OPERAND (arg0
, 0), subtarget
, VOIDmode
, 0);
6649 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
6650 size_int (bitnum
), target
, 1);
6652 if (GET_MODE (op0
) != mode
)
6653 op0
= convert_to_mode (mode
, op0
, 1);
6655 if (bitnum
!= TYPE_PRECISION (type
) - 1)
6656 op0
= expand_and (op0
, const1_rtx
, target
);
6658 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
6659 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, target
, 0,
6665 /* Now see if we are likely to be able to do this. Return if not. */
6666 if (! can_compare_p (operand_mode
))
6668 icode
= setcc_gen_code
[(int) code
];
6669 if (icode
== CODE_FOR_nothing
6670 || (only_cheap
&& insn_operand_mode
[(int) icode
][0] != mode
))
6672 /* We can only do this if it is one of the special cases that
6673 can be handled without an scc insn. */
6674 if ((code
== LT
&& integer_zerop (arg1
))
6675 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
6677 else if (BRANCH_COST
>= 0
6678 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
6679 && TREE_CODE (type
) != REAL_TYPE
6680 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
6681 != CODE_FOR_nothing
)
6682 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
6683 != CODE_FOR_nothing
)))
6689 preexpand_calls (exp
);
6690 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
6691 || GET_MODE (subtarget
) != operand_mode
6692 || ! safe_from_p (subtarget
, arg1
))
6695 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
6696 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6699 target
= gen_reg_rtx (mode
);
6701 result
= emit_store_flag (target
, code
, op0
, op1
, operand_mode
,
6707 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
6708 result
, 0, OPTAB_LIB_WIDEN
);
6712 /* If this failed, we have to do this with set/compare/jump/set code. */
6713 if (target
== 0 || GET_CODE (target
) != REG
6714 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
6715 target
= gen_reg_rtx (GET_MODE (target
));
6717 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
6718 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
6719 operand_mode
, NULL_RTX
, 0);
6720 if (GET_CODE (result
) == CONST_INT
)
6721 return (((result
== const0_rtx
&& ! invert
)
6722 || (result
!= const0_rtx
&& invert
))
6723 ? const0_rtx
: const1_rtx
);
6725 label
= gen_label_rtx ();
6726 if (bcc_gen_fctn
[(int) code
] == 0)
6729 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
6730 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.

   NOTE(review): rebuilt from a line-mangled extraction; braces and the
   trailing barrier emission were reconstructed — verify against the
   original expr.c.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  /* The jump table is read-only.  */
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */