1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
26 #include "insn-flags.h"
27 #include "insn-codes.h"
29 #include "insn-config.h"
33 #include "typeclass.h"
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first.
40 They should if the stack and args grow in opposite directions, but
41 only if we have push insns. */
/* Fix: the target configuration macro is spelled ARGS_GROW_DOWNWARD.
   The previous "ARGS_GROW_DOWNARD" could never be defined by any target,
   so the != test silently degenerated to just STACK_GROWS_DOWNWARD.  */
45 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
46 #define PUSH_ARGS_REVERSED /* If it's last to first */
51 #ifndef STACK_PUSH_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_PUSH_CODE PRE_DEC
55 #define STACK_PUSH_CODE PRE_INC
59 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
60 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
62 /* If this is nonzero, we do not bother generating VOLATILE
63 around volatile memory references, and we are willing to
64 output indirect addresses. If cse is to follow, we reject
65 indirect addresses so a useful potential cse is generated;
66 if it is used only once, instruction combination will produce
67 the same indirect address eventually. */
70 /* Nonzero to generate code for all the subroutines within an
71 expression before generating the upper levels of the expression.
72 Nowadays this is never zero. */
73 int do_preexpand_calls
= 1;
75 /* Number of units that we should eventually pop off the stack.
76 These are the arguments to function calls that have already returned. */
/* NOTE(review): file-scope, so zero at startup; reset to 0 again at the
   start of each function (see the "start of compiling a function" code
   below) and saved/restored around nested functions.  */
77 int pending_stack_adjust
;
79 /* Nonzero means stack pops must not be deferred, and deferred stack
80 pops must not be output. It is nonzero inside a function call,
81 inside a conditional expression, inside a statement expression,
82 and in other cases as well. */
83 int inhibit_defer_pop
;
85 /* A list of all cleanups which belong to the arguments of
86 function calls being expanded by expand_call. */
/* Cleared per function; saved/restored across nested-function compilation
   together with pending_stack_adjust and inhibit_defer_pop.  */
87 tree cleanups_this_call
;
89 /* Similarly for __builtin_apply_args. */
90 static rtx apply_args_value
;
92 /* Nonzero means __builtin_saveregs has already been done in this function.
93 The value is the pseudoreg containing the value __builtin_saveregs returned.  */
95 static rtx saveregs_value
;
97 /* This structure is used by move_by_pieces to describe the move to
100 struct move_by_pieces
109 int explicit_inc_from
;
115 static rtx enqueue_insn
PROTO((rtx
, rtx
));
116 static int queued_subexp_p
PROTO((rtx
));
117 static void init_queue
PROTO((void));
118 static void move_by_pieces
PROTO((rtx
, rtx
, int, int));
119 static int move_by_pieces_ninsns
PROTO((unsigned int, int));
120 static void move_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
121 struct move_by_pieces
*));
122 static void group_insns
PROTO((rtx
));
123 static void store_constructor
PROTO((tree
, rtx
));
124 static rtx store_field
PROTO((rtx
, int, int, enum machine_mode
, tree
,
125 enum machine_mode
, int, int, int));
126 static tree save_noncopied_parts
PROTO((tree
, tree
));
127 static tree init_noncopied_parts
PROTO((tree
, tree
));
128 static int safe_from_p
PROTO((rtx
, tree
));
129 static int fixed_type_p
PROTO((tree
));
130 static int get_pointer_alignment
PROTO((tree
, unsigned));
131 static tree string_constant
PROTO((tree
, tree
*));
132 static tree c_strlen
PROTO((tree
));
133 static rtx expand_builtin
PROTO((tree
, rtx
, rtx
, enum machine_mode
, int));
134 static int apply_args_size
PROTO((void));
135 static int apply_result_size
PROTO((void));
136 static rtx result_vector
PROTO((int, rtx
));
137 static rtx expand_builtin_apply_args
PROTO((void));
138 static rtx expand_builtin_apply
PROTO((rtx
, rtx
, rtx
));
139 static void expand_builtin_return
PROTO((rtx
));
140 static rtx expand_increment
PROTO((tree
, int));
141 static void preexpand_calls
PROTO((tree
));
142 static void do_jump_by_parts_greater
PROTO((tree
, int, rtx
, rtx
));
143 static void do_jump_by_parts_equality
PROTO((tree
, rtx
, rtx
));
144 static void do_jump_by_parts_equality_rtx
PROTO((rtx
, rtx
, rtx
));
145 static void do_jump_for_compare
PROTO((rtx
, rtx
, rtx
));
146 static rtx compare
PROTO((tree
, enum rtx_code
, enum rtx_code
));
147 static rtx do_store_flag
PROTO((tree
, rtx
, enum machine_mode
, int));
149 /* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
153 static char direct_load
[NUM_MACHINE_MODES
];
154 static char direct_store
[NUM_MACHINE_MODES
];
156 /* MOVE_RATIO is the number of move instructions that is better than a block move.  */
160 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
163 /* A value of around 6 would minimize code size; infinity would minimize execution time.  */
165 #define MOVE_RATIO 15
169 /* This array records the insn_code of insns to perform block moves. */
170 static enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
172 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
174 #ifndef SLOW_UNALIGNED_ACCESS
175 #define SLOW_UNALIGNED_ACCESS 0
178 /* Register mappings for target machines without register windows. */
179 #ifndef INCOMING_REGNO
180 #define INCOMING_REGNO(OUT) (OUT)
182 #ifndef OUTGOING_REGNO
183 #define OUTGOING_REGNO(IN) (IN)
186 /* This is run once per compilation to set up which modes can be used
187 directly in memory and to initialize the block move optab. */
193 enum machine_mode mode
;
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 rtx mem
= gen_rtx (MEM
, VOIDmode
, stack_pointer_rtx
);
198 rtx mem1
= gen_rtx (MEM
, VOIDmode
, frame_pointer_rtx
);
201 insn
= emit_insn (gen_rtx (SET
, 0, 0));
202 pat
= PATTERN (insn
);
204 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
205 mode
= (enum machine_mode
) ((int) mode
+ 1))
211 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
212 PUT_MODE (mem
, mode
);
213 PUT_MODE (mem1
, mode
);
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
218 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
219 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
220 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
223 if (! HARD_REGNO_MODE_OK (regno
, mode
))
226 reg
= gen_rtx (REG
, mode
, regno
);
229 SET_DEST (pat
) = reg
;
230 if (recog (pat
, insn
, &num_clobbers
) >= 0)
231 direct_load
[(int) mode
] = 1;
233 SET_SRC (pat
) = mem1
;
234 SET_DEST (pat
) = reg
;
235 if (recog (pat
, insn
, &num_clobbers
) >= 0)
236 direct_load
[(int) mode
] = 1;
239 SET_DEST (pat
) = mem
;
240 if (recog (pat
, insn
, &num_clobbers
) >= 0)
241 direct_store
[(int) mode
] = 1;
244 SET_DEST (pat
) = mem1
;
245 if (recog (pat
, insn
, &num_clobbers
) >= 0)
246 direct_store
[(int) mode
] = 1;
249 movstr_optab
[(int) mode
] = CODE_FOR_nothing
;
256 movstr_optab
[(int) QImode
] = CODE_FOR_movstrqi
;
260 movstr_optab
[(int) HImode
] = CODE_FOR_movstrhi
;
264 movstr_optab
[(int) SImode
] = CODE_FOR_movstrsi
;
268 movstr_optab
[(int) DImode
] = CODE_FOR_movstrdi
;
272 movstr_optab
[(int) TImode
] = CODE_FOR_movstrti
;
276 /* This is run at the start of compiling a function. */
283 pending_stack_adjust
= 0;
284 inhibit_defer_pop
= 0;
285 cleanups_this_call
= 0;
287 apply_args_value
= 0;
291 /* Save all variables describing the current status into the structure *P.
292 This is used before starting a nested function. */
298 /* Instead of saving the postincrement queue, empty it. */
301 p
->pending_stack_adjust
= pending_stack_adjust
;
302 p
->inhibit_defer_pop
= inhibit_defer_pop
;
303 p
->cleanups_this_call
= cleanups_this_call
;
304 p
->saveregs_value
= saveregs_value
;
305 p
->apply_args_value
= apply_args_value
;
306 p
->forced_labels
= forced_labels
;
308 pending_stack_adjust
= 0;
309 inhibit_defer_pop
= 0;
310 cleanups_this_call
= 0;
312 apply_args_value
= 0;
316 /* Restore all variables describing the current status from the structure *P.
317 This is used after a nested function. */
320 restore_expr_status (p
)
323 pending_stack_adjust
= p
->pending_stack_adjust
;
324 inhibit_defer_pop
= p
->inhibit_defer_pop
;
325 cleanups_this_call
= p
->cleanups_this_call
;
326 saveregs_value
= p
->saveregs_value
;
327 apply_args_value
= p
->apply_args_value
;
328 forced_labels
= p
->forced_labels
;
331 /* Manage the queue of increment instructions to be output
332 for POSTINCREMENT_EXPR expressions, etc. */
334 static rtx pending_chain
;
336 /* Queue up to increment (or change) VAR later. BODY says how:
337 BODY should be the same thing you would pass to emit_insn
338 to increment right away. It will go to emit_insn later on.
340 The value is a QUEUED expression to be used in place of VAR
341 where you want to guarantee the pre-incrementation value of VAR. */
344 enqueue_insn (var
, body
)
347 pending_chain
= gen_rtx (QUEUED
, GET_MODE (var
),
348 var
, NULL_RTX
, NULL_RTX
, body
, pending_chain
);
349 return pending_chain
;
352 /* Use protect_from_queue to convert a QUEUED expression
353 into something that you can put immediately into an instruction.
354 If the queued incrementation has not happened yet,
355 protect_from_queue returns the variable itself.
356 If the incrementation has happened, protect_from_queue returns a temp
357 that contains a copy of the old value of the variable.
359 Any time an rtx which might possibly be a QUEUED is to be put
360 into an instruction, it must be passed through protect_from_queue first.
361 QUEUED expressions are not meaningful in instructions.
363 Do not pass a value through protect_from_queue and then hold
364 on to it for a while before putting it in an instruction!
365 If the queue is flushed in between, incorrect code will result. */
368 protect_from_queue (x
, modify
)
372 register RTX_CODE code
= GET_CODE (x
);
374 #if 0 /* A QUEUED can hang around after the queue is forced out. */
375 /* Shortcut for most common case. */
376 if (pending_chain
== 0)
382 /* A special hack for read access to (MEM (QUEUED ...))
383 to facilitate use of autoincrement.
384 Make a copy of the contents of the memory location
385 rather than a copy of the address, but not
386 if the value is of mode BLKmode. */
387 if (code
== MEM
&& GET_MODE (x
) != BLKmode
388 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
390 register rtx y
= XEXP (x
, 0);
391 XEXP (x
, 0) = QUEUED_VAR (y
);
394 register rtx temp
= gen_reg_rtx (GET_MODE (x
));
395 emit_insn_before (gen_move_insn (temp
, x
),
401 /* Otherwise, recursively protect the subexpressions of all
402 the kinds of rtx's that can contain a QUEUED. */
404 XEXP (x
, 0) = protect_from_queue (XEXP (x
, 0), 0);
405 else if (code
== PLUS
|| code
== MULT
)
407 XEXP (x
, 0) = protect_from_queue (XEXP (x
, 0), 0);
408 XEXP (x
, 1) = protect_from_queue (XEXP (x
, 1), 0);
412 /* If the increment has not happened, use the variable itself. */
413 if (QUEUED_INSN (x
) == 0)
414 return QUEUED_VAR (x
);
415 /* If the increment has happened and a pre-increment copy exists, use that copy.  */
417 if (QUEUED_COPY (x
) != 0)
418 return QUEUED_COPY (x
);
419 /* The increment has happened but we haven't set up a pre-increment copy.
420 Set one up now, and use it. */
421 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
422 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
424 return QUEUED_COPY (x
);
427 /* Return nonzero if X contains a QUEUED expression:
428 if it contains anything that will be altered by a queued increment.
429 We handle only combinations of MEM, PLUS, MINUS and MULT operators
430 since memory addresses generally contain only those. */
436 register enum rtx_code code
= GET_CODE (x
);
442 return queued_subexp_p (XEXP (x
, 0));
446 return queued_subexp_p (XEXP (x
, 0))
447 || queued_subexp_p (XEXP (x
, 1));
452 /* Perform all the pending incrementations. */
458 while (p
= pending_chain
)
460 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
461 pending_chain
= QUEUED_NEXT (p
);
472 /* Copy data from FROM to TO, where the machine modes are not the same.
473 Both modes may be integer, or both may be floating.
474 UNSIGNEDP should be nonzero if FROM is an unsigned type.
475 This causes zero-extension instead of sign-extension. */
478 convert_move (to
, from
, unsignedp
)
479 register rtx to
, from
;
482 enum machine_mode to_mode
= GET_MODE (to
);
483 enum machine_mode from_mode
= GET_MODE (from
);
484 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
485 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
489 /* rtx code for making an equivalent value. */
490 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
492 to
= protect_from_queue (to
, 1);
493 from
= protect_from_queue (from
, 0);
495 if (to_real
!= from_real
)
498 /* If FROM is a SUBREG that indicates that we have already done at least
499 the required extension, strip it.  We don't handle such SUBREGs as noop conversions.  */
502 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
503 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
504 >= GET_MODE_SIZE (to_mode
))
505 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
506 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
508 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
511 if (to_mode
== from_mode
512 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
514 emit_move_insn (to
, from
);
520 #ifdef HAVE_extendqfhf2
/* Fix: this arm handles the QFmode -> HFmode float extension, so it must
   test and emit the extendqfhf2 pattern.  The original tested
   HAVE_extendqfsf2 and emitted CODE_FOR_extendqfsf2 (a copy-paste typo
   from the QF -> SF arm just below), which would extend to the wrong
   mode whenever QF->HF was requested.  */
521   if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
523     emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
527 #ifdef HAVE_extendqfsf2
528 if (HAVE_extendqfsf2
&& from_mode
== QFmode
&& to_mode
== SFmode
)
530 emit_unop_insn (CODE_FOR_extendqfsf2
, to
, from
, UNKNOWN
);
534 #ifdef HAVE_extendqfdf2
535 if (HAVE_extendqfdf2
&& from_mode
== QFmode
&& to_mode
== DFmode
)
537 emit_unop_insn (CODE_FOR_extendqfdf2
, to
, from
, UNKNOWN
);
541 #ifdef HAVE_extendqfxf2
542 if (HAVE_extendqfxf2
&& from_mode
== QFmode
&& to_mode
== XFmode
)
544 emit_unop_insn (CODE_FOR_extendqfxf2
, to
, from
, UNKNOWN
);
548 #ifdef HAVE_extendqftf2
549 if (HAVE_extendqftf2
&& from_mode
== QFmode
&& to_mode
== TFmode
)
551 emit_unop_insn (CODE_FOR_extendqftf2
, to
, from
, UNKNOWN
);
556 #ifdef HAVE_extendhfsf2
557 if (HAVE_extendhfsf2
&& from_mode
== HFmode
&& to_mode
== SFmode
)
559 emit_unop_insn (CODE_FOR_extendhfsf2
, to
, from
, UNKNOWN
);
563 #ifdef HAVE_extendhfdf2
564 if (HAVE_extendhfdf2
&& from_mode
== HFmode
&& to_mode
== DFmode
)
566 emit_unop_insn (CODE_FOR_extendhfdf2
, to
, from
, UNKNOWN
);
570 #ifdef HAVE_extendhfxf2
571 if (HAVE_extendhfxf2
&& from_mode
== HFmode
&& to_mode
== XFmode
)
573 emit_unop_insn (CODE_FOR_extendhfxf2
, to
, from
, UNKNOWN
);
577 #ifdef HAVE_extendhftf2
578 if (HAVE_extendhftf2
&& from_mode
== HFmode
&& to_mode
== TFmode
)
580 emit_unop_insn (CODE_FOR_extendhftf2
, to
, from
, UNKNOWN
);
585 #ifdef HAVE_extendsfdf2
586 if (HAVE_extendsfdf2
&& from_mode
== SFmode
&& to_mode
== DFmode
)
588 emit_unop_insn (CODE_FOR_extendsfdf2
, to
, from
, UNKNOWN
);
592 #ifdef HAVE_extendsfxf2
593 if (HAVE_extendsfxf2
&& from_mode
== SFmode
&& to_mode
== XFmode
)
595 emit_unop_insn (CODE_FOR_extendsfxf2
, to
, from
, UNKNOWN
);
599 #ifdef HAVE_extendsftf2
600 if (HAVE_extendsftf2
&& from_mode
== SFmode
&& to_mode
== TFmode
)
602 emit_unop_insn (CODE_FOR_extendsftf2
, to
, from
, UNKNOWN
);
606 #ifdef HAVE_extenddfxf2
607 if (HAVE_extenddfxf2
&& from_mode
== DFmode
&& to_mode
== XFmode
)
609 emit_unop_insn (CODE_FOR_extenddfxf2
, to
, from
, UNKNOWN
);
613 #ifdef HAVE_extenddftf2
614 if (HAVE_extenddftf2
&& from_mode
== DFmode
&& to_mode
== TFmode
)
616 emit_unop_insn (CODE_FOR_extenddftf2
, to
, from
, UNKNOWN
);
621 #ifdef HAVE_trunchfqf2
622 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
624 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
628 #ifdef HAVE_truncsfqf2
629 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
631 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
635 #ifdef HAVE_truncdfqf2
636 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
638 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
642 #ifdef HAVE_truncxfqf2
643 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
645 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
649 #ifdef HAVE_trunctfqf2
650 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
652 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
656 #ifdef HAVE_truncsfhf2
657 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
659 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
663 #ifdef HAVE_truncdfhf2
664 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
666 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
670 #ifdef HAVE_truncxfhf2
671 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
673 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
677 #ifdef HAVE_trunctfhf2
678 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
680 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
684 #ifdef HAVE_truncdfsf2
685 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
687 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
691 #ifdef HAVE_truncxfsf2
692 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
694 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
698 #ifdef HAVE_trunctfsf2
699 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
701 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
705 #ifdef HAVE_truncxfdf2
706 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
708 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
712 #ifdef HAVE_trunctfdf2
713 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
715 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
727 libcall
= extendsfdf2_libfunc
;
731 libcall
= extendsfxf2_libfunc
;
735 libcall
= extendsftf2_libfunc
;
744 libcall
= truncdfsf2_libfunc
;
748 libcall
= extenddfxf2_libfunc
;
752 libcall
= extenddftf2_libfunc
;
761 libcall
= truncxfsf2_libfunc
;
765 libcall
= truncxfdf2_libfunc
;
774 libcall
= trunctfsf2_libfunc
;
778 libcall
= trunctfdf2_libfunc
;
784 if (libcall
== (rtx
) 0)
785 /* This conversion is not implemented yet. */
788 emit_library_call (libcall
, 1, to_mode
, 1, from
, from_mode
);
789 emit_move_insn (to
, hard_libcall_value (to_mode
));
793 /* Now both modes are integers. */
795 /* Handle expanding beyond a word. */
796 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
797 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
804 enum machine_mode lowpart_mode
;
805 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
807 /* Try converting directly if the insn is supported. */
808 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
811 /* If FROM is a SUBREG, put it into a register. Do this
812 so that we always generate the same set of insns for
813 better cse'ing; if an intermediate assignment occurred,
814 we won't be doing the operation directly on the SUBREG. */
815 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
816 from
= force_reg (from_mode
, from
);
817 emit_unop_insn (code
, to
, from
, equiv_code
);
820 /* Next, try converting via full word. */
821 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
822 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
823 != CODE_FOR_nothing
))
825 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
826 emit_unop_insn (code
, to
,
827 gen_lowpart (word_mode
, to
), equiv_code
);
831 /* No special multiword conversion insn; do it by hand. */
834 /* Get a copy of FROM widened to a word, if necessary. */
835 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
836 lowpart_mode
= word_mode
;
838 lowpart_mode
= from_mode
;
840 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
842 lowpart
= gen_lowpart (lowpart_mode
, to
);
843 emit_move_insn (lowpart
, lowfrom
);
845 /* Compute the value to put in each remaining word. */
847 fill_value
= const0_rtx
;
852 && insn_operand_mode
[(int) CODE_FOR_slt
][0] == word_mode
853 && STORE_FLAG_VALUE
== -1)
855 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
857 fill_value
= gen_reg_rtx (word_mode
);
858 emit_insn (gen_slt (fill_value
));
864 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
865 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
867 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
871 /* Fill the remaining words. */
872 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
874 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
875 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
880 if (fill_value
!= subword
)
881 emit_move_insn (subword
, fill_value
);
884 insns
= get_insns ();
887 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
888 gen_rtx (equiv_code
, to_mode
, from
));
892 /* Truncating multi-word to a word or less. */
893 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
894 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
896 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
900 /* Handle pointer conversion */ /* SPEE 900220 */
901 if (to_mode
== PSImode
)
903 if (from_mode
!= SImode
)
904 from
= convert_to_mode (SImode
, from
, unsignedp
);
906 #ifdef HAVE_truncsipsi
909 emit_unop_insn (CODE_FOR_truncsipsi
, to
, from
, UNKNOWN
);
912 #endif /* HAVE_truncsipsi */
916 if (from_mode
== PSImode
)
918 if (to_mode
!= SImode
)
920 from
= convert_to_mode (SImode
, from
, unsignedp
);
925 #ifdef HAVE_extendpsisi
926 if (HAVE_extendpsisi
)
928 emit_unop_insn (CODE_FOR_extendpsisi
, to
, from
, UNKNOWN
);
931 #endif /* HAVE_extendpsisi */
936 /* Now follow all the conversions between integers
937 no more than a word long. */
939 /* For truncation, usually we can just refer to FROM in a narrower mode. */
940 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
941 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
942 GET_MODE_BITSIZE (from_mode
)))
944 if (!((GET_CODE (from
) == MEM
945 && ! MEM_VOLATILE_P (from
)
946 && direct_load
[(int) to_mode
]
947 && ! mode_dependent_address_p (XEXP (from
, 0)))
948 || GET_CODE (from
) == REG
949 || GET_CODE (from
) == SUBREG
))
950 from
= force_reg (from_mode
, from
);
951 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
955 /* Handle extension. */
956 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
958 /* Convert directly if that works. */
959 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
962 /* If FROM is a SUBREG, put it into a register. Do this
963 so that we always generate the same set of insns for
964 better cse'ing; if an intermediate assignment occurred,
965 we won't be doing the operation directly on the SUBREG. */
966 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
967 from
= force_reg (from_mode
, from
);
968 emit_unop_insn (code
, to
, from
, equiv_code
);
973 enum machine_mode intermediate
;
975 /* Search for a mode to convert via. */
976 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
977 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
978 if ((can_extend_p (to_mode
, intermediate
, unsignedp
)
980 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
981 != CODE_FOR_nothing
))
983 convert_move (to
, convert_to_mode (intermediate
, from
,
984 unsignedp
), unsignedp
);
988 /* No suitable intermediate mode. */
993 /* Support special truncate insns for certain modes. */
995 if (from_mode
== DImode
&& to_mode
== SImode
)
997 #ifdef HAVE_truncdisi2
1000 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1004 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1008 if (from_mode
== DImode
&& to_mode
== HImode
)
1010 #ifdef HAVE_truncdihi2
1011 if (HAVE_truncdihi2
)
1013 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1017 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1021 if (from_mode
== DImode
&& to_mode
== QImode
)
1023 #ifdef HAVE_truncdiqi2
1024 if (HAVE_truncdiqi2
)
1026 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1030 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1034 if (from_mode
== SImode
&& to_mode
== HImode
)
1036 #ifdef HAVE_truncsihi2
1037 if (HAVE_truncsihi2
)
1039 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1043 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1047 if (from_mode
== SImode
&& to_mode
== QImode
)
1049 #ifdef HAVE_truncsiqi2
1050 if (HAVE_truncsiqi2
)
1052 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1056 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1060 if (from_mode
== HImode
&& to_mode
== QImode
)
1062 #ifdef HAVE_trunchiqi2
1063 if (HAVE_trunchiqi2
)
1065 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1069 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1073 /* Handle truncation of volatile memrefs, and so on;
1074 the things that couldn't be truncated directly,
1075 and for which there was no special instruction. */
1076 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1078 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1079 emit_move_insn (to
, temp
);
1083 /* Mode combination is not recognized. */
1087 /* Return an rtx for a value that would result
1088 from converting X to mode MODE.
1089 Both X and MODE may be floating, or both integer.
1090 UNSIGNEDP is nonzero if X is an unsigned value.
1091 This can be done by referring to a part of X in place
1092 or by copying to a new temporary with conversion.
1094 This function *must not* call protect_from_queue
1095 except when putting X into an insn (in which case convert_move does it). */
1098 convert_to_mode (mode
, x
, unsignedp
)
1099 enum machine_mode mode
;
1105 /* If FROM is a SUBREG that indicates that we have already done at least
1106 the required extension, strip it. */
1108 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1109 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1110 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1111 x
= gen_lowpart (mode
, x
);
1113 if (mode
== GET_MODE (x
))
1116 /* There is one case that we must handle specially: If we are converting
1117 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1118 we are to interpret the constant as unsigned, gen_lowpart will do
1119 the wrong thing if the constant appears negative. What we want to do is
1120 make the high-order word of the constant zero, not all ones. */
1122 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1123 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1124 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1125 return immed_double_const (INTVAL (x
), (HOST_WIDE_INT
) 0, mode
);
1127 /* We can do this with a gen_lowpart if both desired and current modes
1128 are integer, and this is either a constant integer, a register, or a
1129 non-volatile MEM. Except for the constant case, we must be narrowing
1132 if (GET_CODE (x
) == CONST_INT
1133 || (GET_MODE_CLASS (mode
) == MODE_INT
1134 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
1135 && (GET_CODE (x
) == CONST_DOUBLE
1136 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (GET_MODE (x
))
1137 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
))
1138 && direct_load
[(int) mode
]
1139 || GET_CODE (x
) == REG
)))))
1140 return gen_lowpart (mode
, x
);
1142 temp
= gen_reg_rtx (mode
);
1143 convert_move (temp
, x
, unsignedp
);
1147 /* Generate several move instructions to copy LEN bytes
1148 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1149 The caller must pass FROM and TO
1150 through protect_from_queue before calling.
1151 ALIGN (in bytes) is maximum alignment we can assume. */
1154 move_by_pieces (to
, from
, len
, align
)
1158 struct move_by_pieces data
;
1159 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1160 int max_size
= MOVE_MAX
+ 1;
1163 data
.to_addr
= to_addr
;
1164 data
.from_addr
= from_addr
;
1168 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1169 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1171 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1172 || GET_CODE (from_addr
) == POST_INC
1173 || GET_CODE (from_addr
) == POST_DEC
);
1175 data
.explicit_inc_from
= 0;
1176 data
.explicit_inc_to
= 0;
1178 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1179 if (data
.reverse
) data
.offset
= len
;
1182 /* If copying requires more than two move insns,
1183 copy addresses to registers (to make displacements shorter)
1184 and use post-increment if available. */
1185 if (!(data
.autinc_from
&& data
.autinc_to
)
1186 && move_by_pieces_ninsns (len
, align
) > 2)
1188 #ifdef HAVE_PRE_DECREMENT
1189 if (data
.reverse
&& ! data
.autinc_from
)
1191 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1192 data
.autinc_from
= 1;
1193 data
.explicit_inc_from
= -1;
1196 #ifdef HAVE_POST_INCREMENT
1197 if (! data
.autinc_from
)
1199 data
.from_addr
= copy_addr_to_reg (from_addr
);
1200 data
.autinc_from
= 1;
1201 data
.explicit_inc_from
= 1;
1204 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1205 data
.from_addr
= copy_addr_to_reg (from_addr
);
1206 #ifdef HAVE_PRE_DECREMENT
1207 if (data
.reverse
&& ! data
.autinc_to
)
1209 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1211 data
.explicit_inc_to
= -1;
1214 #ifdef HAVE_POST_INCREMENT
1215 if (! data
.reverse
&& ! data
.autinc_to
)
1217 data
.to_addr
= copy_addr_to_reg (to_addr
);
1219 data
.explicit_inc_to
= 1;
1222 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1223 data
.to_addr
= copy_addr_to_reg (to_addr
);
1226 if (! (STRICT_ALIGNMENT
|| SLOW_UNALIGNED_ACCESS
)
1227 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1230 /* First move what we can in the largest integer mode, then go to
1231 successively smaller modes. */
1233 while (max_size
> 1)
1235 enum machine_mode mode
= VOIDmode
, tmode
;
1236 enum insn_code icode
;
1238 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1239 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1240 if (GET_MODE_SIZE (tmode
) < max_size
)
1243 if (mode
== VOIDmode
)
1246 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1247 if (icode
!= CODE_FOR_nothing
1248 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1249 GET_MODE_SIZE (mode
)))
1250 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1252 max_size
= GET_MODE_SIZE (mode
);
1255 /* The code above should have handled everything. */
/* NOTE(review): this chunk is a lossy extraction of GNU CC expr.c (1992);
   statements are word-wrapped across lines and some original lines
   (braces, `mode = tmode;`, `return n_insns;`) are missing.  Comments
   below annotate the surviving text; do not treat it as compilable.  */
1260 /* Return number of insns required to move L bytes by pieces.
1261 ALIGN (in bytes) is maximum alignment we can assume. */
1264 move_by_pieces_ninsns (l
, align
)
/* Counts, without emitting anything, how many move insns
   move_by_pieces would use -- callers compare this against MOVE_RATIO.  */
1268 register int n_insns
= 0;
/* max_size starts one past MOVE_MAX so the widest usable mode is tried first.  */
1269 int max_size
= MOVE_MAX
+ 1;
/* If unaligned access is cheap, or alignment is at least as strict as
   needed, pretend the data is maximally aligned (the line assigning
   align appears to have been dropped by the extraction).  */
1271 if (! (STRICT_ALIGNMENT
|| SLOW_UNALIGNED_ACCESS
)
1272 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
/* Count moves in successively narrower integer modes.  */
1275 while (max_size
> 1)
1277 enum machine_mode mode
= VOIDmode
, tmode
;
1278 enum insn_code icode
;
/* Find the widest integer mode narrower than max_size.  */
1280 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1281 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1282 if (GET_MODE_SIZE (tmode
) < max_size
)
1285 if (mode
== VOIDmode
)
1288 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
/* Only use this mode if the target has a move pattern for it and the
   alignment permits an access of this width.  */
1289 if (icode
!= CODE_FOR_nothing
1290 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1291 GET_MODE_SIZE (mode
)))
/* One insn per full chunk of this mode; the remainder falls through
   to narrower modes.  */
1292 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1294 max_size
= GET_MODE_SIZE (mode
);
/* NOTE(review): lossy extraction -- lines are word-wrapped and some
   original lines (the `from1 = (data->autinc_from` head of the second
   conditional, braces, #endif) are missing.  */
1300 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1301 with move instructions for mode MODE. GENFUN is the gen_... function
1302 to make a move insn for that mode. DATA has all the other info. */
1305 move_by_pieces_1 (genfun
, mode
, data
)
1307 enum machine_mode mode
;
1308 struct move_by_pieces
*data
;
1310 register int size
= GET_MODE_SIZE (mode
);
1311 register rtx to1
, from1
;
/* Emit one move of SIZE bytes per iteration while enough bytes remain.  */
1313 while (data
->len
>= size
)
/* When copying high-to-low, step the offset before the move.  */
1315 if (data
->reverse
) data
->offset
-= size
;
/* Destination: either auto-increment addressing through to_addr,
   or an explicit constant offset from the block base.  */
1317 to1
= (data
->autinc_to
1318 ? gen_rtx (MEM
, mode
, data
->to_addr
)
1319 : change_address (data
->to
, mode
,
1320 plus_constant (data
->to_addr
, data
->offset
)));
/* Source: same choice (the `from1 = (data->autinc_from` line appears
   lost to the extraction).  */
1323 ? gen_rtx (MEM
, mode
, data
->from_addr
)
1324 : change_address (data
->from
, mode
,
1325 plus_constant (data
->from_addr
, data
->offset
)));
/* Explicit pre-decrement of address registers when the target
   supports pre-decrement addressing.  */
1327 #ifdef HAVE_PRE_DECREMENT
1328 if (data
->explicit_inc_to
< 0)
1329 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1330 if (data
->explicit_inc_from
< 0)
1331 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
/* The actual move insn for this chunk.  */
1334 emit_insn ((*genfun
) (to1
, from1
));
/* Explicit post-increment of address registers afterwards.  */
1335 #ifdef HAVE_POST_INCREMENT
1336 if (data
->explicit_inc_to
> 0)
1337 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1338 if (data
->explicit_inc_from
> 0)
1339 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
/* When copying low-to-high, step the offset after the move.  */
1342 if (! data
->reverse
) data
->offset
+= size
;
/* NOTE(review): lossy extraction -- aborts/braces/#else lines and the
   movstr success path (`emit_insn (pat)` etc.) are missing from view.  */
1348 /* Emit code to move a block Y to a block X.
1349 This may be done with string-move instructions,
1350 with multiple scalar move instructions, or with a library call.
1352 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1354 SIZE is an rtx that says how long they are.
1355 ALIGN is the maximum alignment we can assume they have,
1356 measured in bytes. */
1359 emit_block_move (x
, y
, size
, align
)
/* Sanity checks: both operands must be BLKmode MEMs (the abort()
   lines that would follow these tests are not visible here).  */
1364 if (GET_MODE (x
) != BLKmode
)
1367 if (GET_MODE (y
) != BLKmode
)
1370 x
= protect_from_queue (x
, 1);
1371 y
= protect_from_queue (y
, 0);
1372 size
= protect_from_queue (size
, 0);
1374 if (GET_CODE (x
) != MEM
)
1376 if (GET_CODE (y
) != MEM
)
/* Strategy 1: small constant size -> expand inline as scalar moves.  */
1381 if (GET_CODE (size
) == CONST_INT
1382 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
1383 move_by_pieces (x
, y
, INTVAL (size
), align
);
/* Strategy 2: a target movstr<mode> string-move pattern, narrowest first.  */
1386 /* Try the most limited insn first, because there's no point
1387 including more than one in the machine description unless
1388 the more limited one has some advantage. */
1390 rtx opalign
= GEN_INT (align
);
1391 enum machine_mode mode
;
1393 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1394 mode
= GET_MODE_WIDER_MODE (mode
))
1396 enum insn_code code
= movstr_optab
[(int) mode
];
/* The pattern is usable only if SIZE fits in MODE and each operand
   satisfies the pattern's predicate (a null predicate accepts anything).  */
1398 if (code
!= CODE_FOR_nothing
1399 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1400 here because if SIZE is less than the mode mask, as it is
1401 returned by the macro, it will definitely be less than the
1402 actual mode mask. */
1403 && (unsigned) INTVAL (size
) <= GET_MODE_MASK (mode
)
1404 && (insn_operand_predicate
[(int) code
][0] == 0
1405 || (*insn_operand_predicate
[(int) code
][0]) (x
, BLKmode
))
1406 && (insn_operand_predicate
[(int) code
][1] == 0
1407 || (*insn_operand_predicate
[(int) code
][1]) (y
, BLKmode
))
1408 && (insn_operand_predicate
[(int) code
][3] == 0
1409 || (*insn_operand_predicate
[(int) code
][3]) (opalign
,
/* Remember where we are so a failed expansion can be deleted.  */
1413 rtx last
= get_last_insn ();
1416 op2
= convert_to_mode (mode
, size
, 1);
1417 if (insn_operand_predicate
[(int) code
][2] != 0
1418 && ! (*insn_operand_predicate
[(int) code
][2]) (op2
, mode
))
1419 op2
= copy_to_mode_reg (mode
, op2
);
1421 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
/* If the pattern failed to expand, roll back and try a wider mode.  */
1428 delete_insns_since (last
);
/* Strategy 3: fall back to a library call (memcpy or BSD bcopy;
   note bcopy takes source first, hence the different XEXP order).  */
1432 #ifdef TARGET_MEM_FUNCTIONS
1433 emit_library_call (memcpy_libfunc
, 0,
1434 VOIDmode
, 3, XEXP (x
, 0), Pmode
,
1436 convert_to_mode (Pmode
, size
, 1), Pmode
);
1438 emit_library_call (bcopy_libfunc
, 0,
1439 VOIDmode
, 3, XEXP (y
, 0), Pmode
,
1441 convert_to_mode (Pmode
, size
, 1), Pmode
);
/* NOTE(review): lossy extraction -- the declarations of `last`/`pat`/`i`
   and the `emit_insn (pat)` success path of the load_multiple attempt
   are not visible.  */
1446 /* Copy all or part of a value X into registers starting at REGNO.
1447 The number of registers to be filled is NREGS. */
1450 move_block_to_reg (regno
, x
, nregs
, mode
)
1454 enum machine_mode mode
;
/* A constant the target cannot use directly must be spilled to memory.  */
1459 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1460 x
= validize_mem (force_const_mem (mode
, x
));
1462 /* See if the machine can do this with a load multiple insn. */
1463 #ifdef HAVE_load_multiple
1464 last
= get_last_insn ();
1465 pat
= gen_load_multiple (gen_rtx (REG
, word_mode
, regno
), x
,
/* On failure, discard the partial expansion and fall through.  */
1473 delete_insns_since (last
);
/* Fallback: move one word at a time into consecutive hard registers.  */
1476 for (i
= 0; i
< nregs
; i
++)
1477 emit_move_insn (gen_rtx (REG
, word_mode
, regno
+ i
),
1478 operand_subword_force (x
, i
, mode
));
/* NOTE(review): lossy extraction -- declarations and some branch bodies
   (e.g. the handling when operand_subword returns 0) are not visible.  */
1481 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1482 The number of registers to be filled is NREGS. */
1485 move_block_from_reg (regno
, x
, nregs
)
1493 /* See if the machine can do this with a store multiple insn. */
1494 #ifdef HAVE_store_multiple
1495 last
= get_last_insn ();
1496 pat
= gen_store_multiple (x
, gen_rtx (REG
, word_mode
, regno
),
/* On failure, discard the partial expansion and fall through.  */
1504 delete_insns_since (last
);
/* Fallback: store one word at a time from consecutive hard registers.  */
1507 for (i
= 0; i
< nregs
; i
++)
1509 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1514 emit_move_insn (tem
, gen_rtx (REG
, word_mode
, regno
+ i
));
1518 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1521 use_regs (regno
, nregs
)
/* Emit a (use (reg N)) for each register so flow analysis keeps the
   values alive across the upcoming call.  */
1527 for (i
= 0; i
< nregs
; i
++)
1528 emit_insn (gen_rtx (USE
, VOIDmode
, gen_rtx (REG
, word_mode
, regno
+ i
)));
/* NOTE(review): the function header line is missing from this extraction;
   from the comment this is presumably the libcall-grouping helper
   (group_insns (prev)) -- confirm against the full file.  */
1531 /* Mark the instructions since PREV as a libcall block.
1532 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1541 /* Find the instructions to mark */
/* First insn of the group: the one after PREV, or the very first insn
   of the sequence when PREV is absent (the guarding `if` line is lost).  */
1543 insn_first
= NEXT_INSN (prev
);
1545 insn_first
= get_insns ();
1547 insn_last
= get_last_insn ();
/* Cross-link the block: REG_RETVAL on the last insn points at the first,
   REG_LIBCALL on the first points at the last.  */
1549 REG_NOTES (insn_last
) = gen_rtx (INSN_LIST
, REG_RETVAL
, insn_first
,
1550 REG_NOTES (insn_last
));
1552 REG_NOTES (insn_first
) = gen_rtx (INSN_LIST
, REG_LIBCALL
, insn_last
,
1553 REG_NOTES (insn_first
));
/* NOTE(review): lossy extraction -- braces and the #else between the
   memset/bzero branches are missing from view.  */
1556 /* Write zeros through the storage of OBJECT.
1557 If OBJECT has BLKmode, SIZE is its length in bytes. */
1560 clear_storage (object
, size
)
/* BLKmode objects are cleared with a library call; note memset takes
   (addr, value, len) while BSD bzero takes only (addr, len).  */
1564 if (GET_MODE (object
) == BLKmode
)
1566 #ifdef TARGET_MEM_FUNCTIONS
1567 emit_library_call (memset_libfunc
, 0,
1569 XEXP (object
, 0), Pmode
, const0_rtx
, Pmode
,
1570 GEN_INT (size
), Pmode
);
1572 emit_library_call (bzero_libfunc
, 0,
1574 XEXP (object
, 0), Pmode
,
1575 GEN_INT (size
), Pmode
);
/* A scalar object is simply assigned the constant zero.  */
1579 emit_move_insn (object
, const0_rtx
);
/* NOTE(review): lossy extraction -- abort() bodies and parts of the
   address-validity conditions (the reload_in_progress exceptions) are
   missing from view.  */
1582 /* Generate code to copy Y into X.
1583 Both Y and X must have the same mode, except that
1584 Y can be a constant with VOIDmode.
1585 This mode cannot be BLKmode; use emit_block_move for that.
1587 Return the last instruction emitted. */
1590 emit_move_insn (x
, y
)
1593 enum machine_mode mode
= GET_MODE (x
);
1594 enum machine_mode submode
;
1595 enum mode_class
class = GET_MODE_CLASS (mode
);
1598 x
= protect_from_queue (x
, 1);
1599 y
= protect_from_queue (y
, 0);
/* Mode sanity check: BLKmode and mismatched non-VOID modes are invalid
   here (the abort that follows is not visible in this extraction).  */
1601 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
/* Spill a constant the target cannot use as an immediate.  */
1604 if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
1605 y
= force_const_mem (mode
, y
);
1607 /* If X or Y are memory references, verify that their addresses are valid
/* Rebuild X's address if it is not valid (push destinations excepted).  */
1609 if (GET_CODE (x
) == MEM
1610 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
1611 && ! push_operand (x
, GET_MODE (x
)))
1613 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
1614 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
/* Likewise rebuild Y's address.  */
1616 if (GET_CODE (y
) == MEM
1617 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
1619 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
1620 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
1622 if (mode
== BLKmode
)
/* All validation done; delegate the actual emission.  */
1625 return emit_move_insn_1 (x
, y
);
/* NOTE(review): lossy extraction -- loop headers are partially missing
   (the `for (i = 0; ...` opener), as are braces, aborts, and the final
   return of last_insn.  */
1628 /* Low level part of emit_move_insn.
1629 Called just like emit_move_insn, but assumes X and Y
1630 are basically valid. */
1633 emit_move_insn_1 (x
, y
)
1636 enum machine_mode mode
= GET_MODE (x
);
1637 enum machine_mode submode
;
1638 enum mode_class
class = GET_MODE_CLASS (mode
);
/* For complex modes, compute the scalar component mode so the value can
   be moved as two halves if no whole-mode move pattern exists.  */
1641 if (class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
1642 submode
= mode_for_size (GET_MODE_UNIT_SIZE (mode
) * BITS_PER_UNIT
,
1643 (class == MODE_COMPLEX_INT
1644 ? MODE_INT
: MODE_FLOAT
),
/* Case 1: the target has a move pattern for MODE itself.  */
1647 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
1649 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
1651 /* Expand complex moves by moving real part and imag part, if possible. */
/* Case 2: complex value moved as two component-mode moves.  */
1652 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
1653 && submode
!= BLKmode
1654 && (mov_optab
->handlers
[(int) submode
].insn_code
1655 != CODE_FOR_nothing
))
1657 /* Don't split destination if it is a stack push. */
1658 int stack
= push_operand (x
, GET_MODE (x
));
1659 rtx prev
= get_last_insn ();
1661 /* Tell flow that the whole of the destination is being set. */
1662 if (GET_CODE (x
) == REG
)
1663 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, x
));
1665 /* If this is a stack, push the highpart first, so it
1666 will be in the argument order.
1668 In that case, change_address is used only to convert
1669 the mode, not to change the address. */
1670 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
1671 ((stack
? change_address (x
, submode
, (rtx
) 0)
1672 : gen_highpart (submode
, x
)),
1673 gen_highpart (submode
, y
)));
1674 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
1675 ((stack
? change_address (x
, submode
, (rtx
) 0)
1676 : gen_lowpart (submode
, x
)),
1677 gen_lowpart (submode
, y
)));
1681 return get_last_insn ();
1684 /* This will handle any multi-word mode that lacks a move_insn pattern.
1685 However, you will get better code if you define such patterns,
1686 even if they must turn into multiple assembler instructions. */
/* Case 3: multi-word mode with no pattern -> move word by word,
   recursing through emit_move_insn for each subword.  */
1687 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1690 rtx prev_insn
= get_last_insn ();
/* Loop bound: number of words, rounding the mode size up.  (The loop's
   initializer line is missing from this extraction.)  */
1693 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
1696 rtx xpart
= operand_subword (x
, i
, 1, mode
);
1697 rtx ypart
= operand_subword (y
, i
, 1, mode
);
1699 /* If we can't get a part of Y, put Y into memory if it is a
1700 constant. Otherwise, force it into a register. If we still
1701 can't get a part of Y, abort. */
1702 if (ypart
== 0 && CONSTANT_P (y
))
1704 y
= force_const_mem (mode
, y
);
1705 ypart
= operand_subword (y
, i
, 1, mode
);
1707 else if (ypart
== 0)
1708 ypart
= operand_subword_force (y
, i
, mode
);
1710 if (xpart
== 0 || ypart
== 0)
1713 last_insn
= emit_move_insn (xpart
, ypart
);
1715 /* Mark these insns as a libcall block. */
1716 group_insns (prev_insn
);
/* NOTE(review): lossy extraction -- braces, #else lines, and the
   function header of the following routine are missing; the final
   `return gen_rtx (STACK_PUSH_CODE, ...)` line (original line 1776)
   evidently belongs to the NEXT function (presumably gen_push_operand),
   whose header was dropped -- confirm against the full file.  */
1724 /* Pushing data onto the stack. */
1726 /* Push a block of length SIZE (perhaps variable)
1727 and return an rtx to address the beginning of the block.
1728 Note that it is not possible for the value returned to be a QUEUED.
1729 The value may be virtual_outgoing_args_rtx.
1731 EXTRA is the number of bytes of padding to push in addition to SIZE.
1732 BELOW nonzero means this padding comes at low addresses;
1733 otherwise, the padding comes at high addresses. */
1736 push_block (size
, extra
, below
)
/* Allocate SIZE+EXTRA bytes: fold constants, reuse a register-sized
   adjustment when possible, otherwise compute the sum at run time.  */
1741 if (CONSTANT_P (size
))
1742 anti_adjust_stack (plus_constant (size
, extra
));
1743 else if (GET_CODE (size
) == REG
&& extra
== 0)
1744 anti_adjust_stack (size
);
1747 rtx temp
= copy_to_mode_reg (Pmode
, size
);
1749 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
1750 temp
, 0, OPTAB_LIB_WIDEN
);
1751 anti_adjust_stack (temp
);
/* Compute the address of the start of the just-allocated block; the
   arithmetic differs with the stack growth direction.  */
1754 #ifdef STACK_GROWS_DOWNWARD
1755 temp
= virtual_outgoing_args_rtx
;
1756 if (extra
!= 0 && below
)
1757 temp
= plus_constant (temp
, extra
);
1759 if (GET_CODE (size
) == CONST_INT
)
1760 temp
= plus_constant (virtual_outgoing_args_rtx
,
1761 - INTVAL (size
) - (below
? 0 : extra
));
1762 else if (extra
!= 0 && !below
)
1763 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
1764 negate_rtx (Pmode
, plus_constant (size
, extra
)));
1766 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
1767 negate_rtx (Pmode
, size
));
/* Legitimize the address before handing it back.  */
1770 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
/* (Start of the next, header-less function -- see NOTE above.)
   Builds the auto-modify stack-push address (PRE_DEC or PRE_INC
   per STACK_PUSH_CODE from the file head).  */
1776 return gen_rtx (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
/* NOTE(review): lossy extraction -- throughout this function braces,
   #else/#endif lines, several condition heads (e.g. the `if (args_addr
   == 0 ...` opener before line 1877), the movstr success paths, and the
   skip/not_stack declarations are missing from view.  */
1779 /* Generate code to push X onto the stack, assuming it has mode MODE and
1781 MODE is redundant except when X is a CONST_INT (since they don't
1783 SIZE is an rtx for the size of data to be copied (in bytes),
1784 needed only if X is BLKmode.
1786 ALIGN (in bytes) is maximum alignment we can assume.
1788 If PARTIAL and REG are both nonzero, then copy that many of the first
1789 words of X into registers starting with REG, and push the rest of X.
1790 The amount of space pushed is decreased by PARTIAL words,
1791 rounded *down* to a multiple of PARM_BOUNDARY.
1792 REG must be a hard register in this case.
1793 If REG is zero but PARTIAL is not, take any all others actions for an
1794 argument partially in registers, but do not actually load any
1797 EXTRA is the amount in bytes of extra space to leave next to this arg.
1798 This is ignored if an argument block has already been allocated.
1800 On a machine that lacks real push insns, ARGS_ADDR is the address of
1801 the bottom of the argument block for this call. We use indexing off there
1802 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1803 argument block has not been preallocated.
1805 ARGS_SO_FAR is the size of args previously pushed for this call. */
1808 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
1809 args_addr
, args_so_far
)
1811 enum machine_mode mode
;
/* stack_direction records which way the stack grows, to decide whether
   padding goes before or after the data.  */
1822 enum direction stack_direction
1823 #ifdef STACK_GROWS_DOWNWARD
1829 /* Decide where to pad the argument: `downward' for below,
1830 `upward' for above, or `none' for don't pad it.
1831 Default is below for small data on big-endian machines; else above. */
1832 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
1834 /* Invert direction if stack is post-update. */
1835 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
1836 if (where_pad
!= none
)
1837 where_pad
= (where_pad
== downward
? upward
: downward
);
1839 xinner
= x
= protect_from_queue (x
, 0);
/* ---- BLKmode argument: copy a block of memory onto the stack. ---- */
1841 if (mode
== BLKmode
)
1843 /* Copy a block into the stack, entirely or partially. */
1846 int used
= partial
* UNITS_PER_WORD
;
1847 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
1855 /* USED is now the # of bytes we need not copy to the stack
1856 because registers will take care of them. */
/* Skip past the register-passed prefix of the block.  */
1859 xinner
= change_address (xinner
, BLKmode
,
1860 plus_constant (XEXP (xinner
, 0), used
));
1862 /* If the partial register-part of the arg counts in its stack size,
1863 skip the part of stack space corresponding to the registers.
1864 Otherwise, start copying to the beginning of the stack space,
1865 by setting SKIP to 0. */
1866 #ifndef REG_PARM_STACK_SPACE
1872 #ifdef PUSH_ROUNDING
1873 /* Do it with several push insns if that doesn't take lots of insns
1874 and if there is no difficulty with push insns that skip bytes
1875 on the stack for alignment purposes. */
1877 && GET_CODE (size
) == CONST_INT
1879 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
) - used
, align
)
1881 /* Here we avoid the case of a structure whose weak alignment
1882 forces many pushes of a small amount of data,
1883 and such small pushes do rounding that causes trouble. */
1884 && ((! STRICT_ALIGNMENT
&& ! SLOW_UNALIGNED_ACCESS
)
1885 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
1886 || PUSH_ROUNDING (align
) == align
)
1887 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
1889 /* Push padding now if padding above and stack grows down,
1890 or if padding below and stack grows up.
1891 But if space already allocated, this has already been done. */
1892 if (extra
&& args_addr
== 0
1893 && where_pad
!= none
&& where_pad
!= stack_direction
)
1894 anti_adjust_stack (GEN_INT (extra
));
/* Inline push: move_by_pieces through the push operand.  */
1896 move_by_pieces (gen_rtx (MEM
, BLKmode
, gen_push_operand ()), xinner
,
1897 INTVAL (size
) - used
, align
);
1900 #endif /* PUSH_ROUNDING */
1902 /* Otherwise make space on the stack and copy the data
1903 to the address of that space. */
1905 /* Deduct words put into registers from the size we must copy. */
1908 if (GET_CODE (size
) == CONST_INT
)
1909 size
= GEN_INT (INTVAL (size
) - used
);
1911 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
1912 GEN_INT (used
), NULL_RTX
, 0,
1916 /* Get the address of the stack space.
1917 In this case, we do not deal with EXTRA separately.
1918 A single stack adjust will do. */
1921 temp
= push_block (size
, extra
, where_pad
== downward
);
1924 else if (GET_CODE (args_so_far
) == CONST_INT
)
1925 temp
= memory_address (BLKmode
,
1926 plus_constant (args_addr
,
1927 skip
+ INTVAL (args_so_far
)));
1929 temp
= memory_address (BLKmode
,
1930 plus_constant (gen_rtx (PLUS
, Pmode
,
1931 args_addr
, args_so_far
),
1934 /* TEMP is the address of the block. Copy the data there. */
1935 if (GET_CODE (size
) == CONST_INT
1936 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
), align
)
1939 move_by_pieces (gen_rtx (MEM
, BLKmode
, temp
), xinner
,
1940 INTVAL (size
), align
);
1943 /* Try the most limited insn first, because there's no point
1944 including more than one in the machine description unless
1945 the more limited one has some advantage. */
/* movstr patterns, narrowest size operand first (QI, HI, SI, DI).  */
1946 #ifdef HAVE_movstrqi
1948 && GET_CODE (size
) == CONST_INT
1949 && ((unsigned) INTVAL (size
)
1950 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
1952 rtx pat
= gen_movstrqi (gen_rtx (MEM
, BLKmode
, temp
),
1953 xinner
, size
, GEN_INT (align
));
1961 #ifdef HAVE_movstrhi
1963 && GET_CODE (size
) == CONST_INT
1964 && ((unsigned) INTVAL (size
)
1965 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
1967 rtx pat
= gen_movstrhi (gen_rtx (MEM
, BLKmode
, temp
),
1968 xinner
, size
, GEN_INT (align
));
1976 #ifdef HAVE_movstrsi
1979 rtx pat
= gen_movstrsi (gen_rtx (MEM
, BLKmode
, temp
),
1980 xinner
, size
, GEN_INT (align
));
1988 #ifdef HAVE_movstrdi
1991 rtx pat
= gen_movstrdi (gen_rtx (MEM
, BLKmode
, temp
),
1992 xinner
, size
, GEN_INT (align
));
2001 #ifndef ACCUMULATE_OUTGOING_ARGS
2002 /* If the source is referenced relative to the stack pointer,
2003 copy it to another register to stabilize it. We do not need
2004 to do this if we know that we won't be changing sp. */
2006 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
2007 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
2008 temp
= copy_to_reg (temp
);
2011 /* Make inhibit_defer_pop nonzero around the library call
2012 to force it to pop the bcopy-arguments right away. */
2014 #ifdef TARGET_MEM_FUNCTIONS
2015 emit_library_call (memcpy_libfunc
, 0,
2016 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
2019 emit_library_call (bcopy_libfunc
, 0,
2020 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
/* ---- Scalar split between registers and stack. ---- */
2026 else if (partial
> 0)
2028 /* Scalar partly in registers. */
2030 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
2033 /* # words of start of argument
2034 that we must make space for but need not store. */
2035 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
2036 int args_offset
= INTVAL (args_so_far
);
2039 /* Push padding now if padding above and stack grows down,
2040 or if padding below and stack grows up.
2041 But if space already allocated, this has already been done. */
2042 if (extra
&& args_addr
== 0
2043 && where_pad
!= none
&& where_pad
!= stack_direction
)
2044 anti_adjust_stack (GEN_INT (extra
));
2046 /* If we make space by pushing it, we might as well push
2047 the real data. Otherwise, we can leave OFFSET nonzero
2048 and leave the space uninitialized. */
2052 /* Now NOT_STACK gets the number of words that we don't need to
2053 allocate on the stack. */
2054 not_stack
= partial
- offset
;
2056 /* If the partial register-part of the arg counts in its stack size,
2057 skip the part of stack space corresponding to the registers.
2058 Otherwise, start copying to the beginning of the stack space,
2059 by setting SKIP to 0. */
2060 #ifndef REG_PARM_STACK_SPACE
2066 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2067 x
= validize_mem (force_const_mem (mode
, x
));
2069 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2070 SUBREGs of such registers are not allowed. */
2071 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
2072 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
2073 x
= copy_to_reg (x
);
2075 /* Loop over all the words allocated on the stack for this arg. */
2076 /* We can do it by words, because any scalar bigger than a word
2077 has a size a multiple of a word. */
/* Iteration order follows PUSH_ARGS_REVERSED (defined in the file head
   from the stack/args growth directions).  */
2078 #ifndef PUSH_ARGS_REVERSED
2079 for (i
= not_stack
; i
< size
; i
++)
2081 for (i
= size
- 1; i
>= not_stack
; i
--)
/* Recursive call pushes each stack-bound word individually.  */
2083 if (i
>= not_stack
+ offset
)
2084 emit_push_insn (operand_subword_force (x
, i
, mode
),
2085 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
2087 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
2088 * UNITS_PER_WORD
)));
/* ---- Plain scalar pushed entirely onto the stack. ---- */
2094 /* Push padding now if padding above and stack grows down,
2095 or if padding below and stack grows up.
2096 But if space already allocated, this has already been done. */
2097 if (extra
&& args_addr
== 0
2098 && where_pad
!= none
&& where_pad
!= stack_direction
)
2099 anti_adjust_stack (GEN_INT (extra
));
2101 #ifdef PUSH_ROUNDING
2103 addr
= gen_push_operand ();
2106 if (GET_CODE (args_so_far
) == CONST_INT
)
2108 = memory_address (mode
,
2109 plus_constant (args_addr
, INTVAL (args_so_far
)));
2111 addr
= memory_address (mode
, gen_rtx (PLUS
, Pmode
, args_addr
,
2114 emit_move_insn (gen_rtx (MEM
, mode
, addr
), x
);
2118 /* If part should go in registers, copy that part
2119 into the appropriate registers. Do this now, at the end,
2120 since mem-to-mem copies above may do function calls. */
2121 if (partial
> 0 && reg
!= 0)
2122 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
/* Padding on the same side as stack growth is deferred to here.  */
2124 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
2125 anti_adjust_stack (GEN_INT (extra
));
/* NOTE(review): lossy extraction -- the va_start, several declarations
   (argvec, argblock, count, inc, use_insns), abort() bodies, #else
   lines, and the closing va_end are missing from view.  */
2128 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2129 (emitting the queue unless NO_QUEUE is nonzero),
2130 for a value of mode OUTMODE,
2131 with NARGS different arguments, passed as alternating rtx values
2132 and machine_modes to convert them to.
2133 The rtx values should have been passed through protect_from_queue already.
2135 NO_QUEUE will be true if and only if the library call is a `const' call
2136 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2137 to the variable is_const in expand_call.
2139 NO_QUEUE must be true for const calls, because if it isn't, then
2140 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2141 and will be lost if the libcall sequence is optimized away.
2143 NO_QUEUE must be false for non-const calls, because if it isn't, the
2144 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2145 optimized. For instance, the instruction scheduler may incorrectly
2146 move memory references across the non-const call. */
2149 emit_library_call (va_alist
)
2153 /* Total size in bytes of all the stack-parms scanned so far. */
2154 struct args_size args_size
;
2155 /* Size of arguments before any adjustments (such as rounding). */
2156 struct args_size original_args_size
;
2157 register int argnum
;
2158 enum machine_mode outmode
;
2165 CUMULATIVE_ARGS args_so_far
;
/* Per-argument record: value, mode, passing register (or 0), and the
   partial-register/stack-offset bookkeeping.  */
2166 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
2167 struct args_size offset
; struct args_size size
; };
2169 int old_inhibit_defer_pop
= inhibit_defer_pop
;
/* Pull the fixed leading arguments out of the varargs list.  */
2174 orgfun
= fun
= va_arg (p
, rtx
);
2175 no_queue
= va_arg (p
, int);
2176 outmode
= va_arg (p
, enum machine_mode
);
2177 nargs
= va_arg (p
, int);
2179 /* Copy all the libcall-arguments out of the varargs data
2180 and into a vector ARGVEC.
2182 Compute how to pass each argument. We only support a very small subset
2183 of the full argument passing conventions to limit complexity here since
2184 library functions shouldn't have many args. */
2186 argvec
= (struct arg
*) alloca (nargs
* sizeof (struct arg
));
2188 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
);
2190 args_size
.constant
= 0;
/* ---- Pass 1: classify each argument. ---- */
2193 for (count
= 0; count
< nargs
; count
++)
2195 rtx val
= va_arg (p
, rtx
);
2196 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
2198 /* We cannot convert the arg value to the mode the library wants here;
2199 must do it earlier where we know the signedness of the arg. */
2201 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
2204 /* On some machines, there's no way to pass a float to a library fcn.
2205 Pass it as a double instead. */
2206 #ifdef LIBGCC_NEEDS_DOUBLE
2207 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
2208 val
= convert_to_mode (DFmode
, val
, 0), mode
= DFmode
;
2211 /* There's no need to call protect_from_queue, because
2212 either emit_move_insn or emit_push_insn will do that. */
2214 /* Make sure it is a reasonable operand for a move or push insn. */
2215 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
2216 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
2217 val
= force_operand (val
, NULL_RTX
);
2219 argvec
[count
].value
= val
;
2220 argvec
[count
].mode
= mode
;
2222 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2223 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
/* Ask the target where this argument goes (register or stack).  */
2227 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
2228 if (argvec
[count
].reg
&& GET_CODE (argvec
[count
].reg
) == EXPR_LIST
)
2230 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2231 argvec
[count
].partial
2232 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
2234 argvec
[count
].partial
= 0;
2237 locate_and_pad_parm (mode
, NULL_TREE
,
2238 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
2239 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
2240 &argvec
[count
].size
);
/* Variable-sized libcall args are unsupported (abort not visible).  */
2242 if (argvec
[count
].size
.var
)
2245 #ifndef REG_PARM_STACK_SPACE
2246 if (argvec
[count
].partial
)
2247 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
2250 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
2251 #ifdef REG_PARM_STACK_SPACE
2255 args_size
.constant
+= argvec
[count
].size
.constant
;
2257 #ifdef ACCUMULATE_OUTGOING_ARGS
2258 /* If this arg is actually passed on the stack, it might be
2259 clobbering something we already put there (this library call might
2260 be inside the evaluation of an argument to a function whose call
2261 requires the stack). This will only occur when the library call
2262 has sufficient args to run out of argument registers. Abort in
2263 this case; if this ever occurs, code must be added to save and
2264 restore the arg slot. */
2266 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0)
2270 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
)0, 1);
2274 /* If this machine requires an external definition for library
2275 functions, write one out. */
2276 assemble_external_libcall (fun
);
/* Round total stack-argument size up to the stack boundary.  */
2278 original_args_size
= args_size
;
2279 #ifdef STACK_BOUNDARY
2280 args_size
.constant
= (((args_size
.constant
+ (STACK_BYTES
- 1))
2281 / STACK_BYTES
) * STACK_BYTES
);
2284 #ifdef REG_PARM_STACK_SPACE
2285 args_size
.constant
= MAX (args_size
.constant
,
2286 REG_PARM_STACK_SPACE (NULL_TREE
));
2287 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2288 args_size
.constant
-= REG_PARM_STACK_SPACE (NULL_TREE
);
2292 #ifdef ACCUMULATE_OUTGOING_ARGS
2293 if (args_size
.constant
> current_function_outgoing_args_size
)
2294 current_function_outgoing_args_size
= args_size
.constant
;
2295 args_size
.constant
= 0;
2298 #ifndef PUSH_ROUNDING
2299 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
2302 #ifdef PUSH_ARGS_REVERSED
2303 #ifdef STACK_BOUNDARY
2304 /* If we push args individually in reverse order, perform stack alignment
2305 before the first push (the last arg). */
2307 anti_adjust_stack (GEN_INT (args_size
.constant
2308 - original_args_size
.constant
));
/* argnum/inc setup for forward vs. reverse iteration (assignment lines
   not visible in this extraction).  */
2312 #ifdef PUSH_ARGS_REVERSED
2320 /* Push the args that need to be pushed. */
/* ---- Pass 2: push stack-bound arguments. ---- */
2322 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
2324 register enum machine_mode mode
= argvec
[argnum
].mode
;
2325 register rtx val
= argvec
[argnum
].value
;
2326 rtx reg
= argvec
[argnum
].reg
;
2327 int partial
= argvec
[argnum
].partial
;
2329 if (! (reg
!= 0 && partial
== 0))
2330 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, 0, partial
, reg
, 0,
2331 argblock
, GEN_INT (argvec
[count
].offset
.constant
));
2335 #ifndef PUSH_ARGS_REVERSED
2336 #ifdef STACK_BOUNDARY
2337 /* If we pushed args in forward order, perform stack alignment
2338 after pushing the last arg. */
2340 anti_adjust_stack (GEN_INT (args_size
.constant
2341 - original_args_size
.constant
));
2345 #ifdef PUSH_ARGS_REVERSED
2351 /* Now load any reg parms into their regs. */
/* ---- Pass 3: load register-bound arguments last, so the pushes above
   cannot clobber them. ---- */
2353 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
2355 register enum machine_mode mode
= argvec
[argnum
].mode
;
2356 register rtx val
= argvec
[argnum
].value
;
2357 rtx reg
= argvec
[argnum
].reg
;
2358 int partial
= argvec
[argnum
].partial
;
2360 if (reg
!= 0 && partial
== 0)
2361 emit_move_insn (reg
, val
);
2365 /* For version 1.37, try deleting this entirely. */
2369 /* Any regs containing parms remain in use through the call. */
2371 for (count
= 0; count
< nargs
; count
++)
2372 if (argvec
[count
].reg
!= 0)
2373 emit_insn (gen_rtx (USE
, VOIDmode
, argvec
[count
].reg
));
2375 use_insns
= get_insns ();
2378 fun
= prepare_call_address (fun
, NULL_TREE
, &use_insns
);
2380 /* Don't allow popping to be deferred, since then
2381 cse'ing of library calls could delete a call and leave the pop. */
2384 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2385 will set inhibit_defer_pop to that value. */
2387 emit_call_1 (fun
, get_identifier (XSTR (orgfun
, 0)), args_size
.constant
, 0,
2388 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
2389 outmode
!= VOIDmode
? hard_libcall_value (outmode
) : NULL_RTX
,
2390 old_inhibit_defer_pop
+ 1, use_insns
, no_queue
);
2392 /* Now restore inhibit_defer_pop to its actual original value. */
2396 /* Like emit_library_call except that an extra argument, VALUE,
2397 comes second and says where to store the result.
2398 (If VALUE is zero, the result comes in the function value register.) */
/* NOTE(review): many interior lines of this function are elided in this
   extract (gaps in the embedded original line numbers); the comments added
   below describe only what the visible fragments show.  */
2401 emit_library_call_value (va_alist
)
2405 /* Total size in bytes of all the stack-parms scanned so far. */
2406 struct args_size args_size
;
2407 /* Size of arguments before any adjustments (such as rounding). */
2408 struct args_size original_args_size
;
2409 register int argnum
;
2410 enum machine_mode outmode
;
2417 CUMULATIVE_ARGS args_so_far
;
/* Per-argument descriptor: the value, its mode, the register (if any) it is
   passed in, how many words are passed in registers (partial), and its
   stack offset/size. */
2418 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
2419 struct args_size offset
; struct args_size size
; };
2421 int old_inhibit_defer_pop
= inhibit_defer_pop
;
/* Fetch the fixed leading arguments from the varargs list: the function,
   the result slot VALUE, the no-queue flag, the result mode, and the
   number of remaining (value, mode) argument pairs. */
2428 orgfun
= fun
= va_arg (p
, rtx
);
2429 value
= va_arg (p
, rtx
);
2430 no_queue
= va_arg (p
, int);
2431 outmode
= va_arg (p
, enum machine_mode
);
2432 nargs
= va_arg (p
, int);
2434 /* If this kind of value comes back in memory,
2435 decide where in memory it should come back. */
2436 if (RETURN_IN_MEMORY (type_for_mode (outmode
, 0)))
2438 if (GET_CODE (value
) == MEM
)
2441 mem_value
= assign_stack_temp (outmode
, GET_MODE_SIZE (outmode
), 0);
2444 /* ??? Unfinished: must pass the memory address as an argument. */
2446 /* Copy all the libcall-arguments out of the varargs data
2447 and into a vector ARGVEC.
2449 Compute how to pass each argument. We only support a very small subset
2450 of the full argument passing conventions to limit complexity here since
2451 library functions shouldn't have many args. */
2453 argvec
= (struct arg
*) alloca ((nargs
+ 1) * sizeof (struct arg
));
2455 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
);
2457 args_size
.constant
= 0;
2462 /* If there's a structure value address to be passed,
2463 either pass it in the special place, or pass it as an extra argument. */
2466 rtx addr
= XEXP (mem_value
, 0);
2468 if (! struct_value_rtx
)
2472 /* Make sure it is a reasonable operand for a move or push insn. */
2473 if (GET_CODE (addr
) != REG
&& GET_CODE (addr
) != MEM
2474 && ! (CONSTANT_P (addr
) && LEGITIMATE_CONSTANT_P (addr
)))
2475 addr
= force_operand (addr
, NULL_RTX
);
2477 argvec
[count
].value
= addr
;
2478 argvec
[count
].mode
= outmode
;
2479 argvec
[count
].partial
= 0;
2481 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, outmode
, NULL_TREE
, 1);
2482 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2483 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, outmode
, NULL_TREE
, 1))
2487 locate_and_pad_parm (outmode
, NULL_TREE
,
2488 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
2489 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
2490 &argvec
[count
].size
);
2493 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
2494 #ifdef REG_PARM_STACK_SPACE
2498 args_size
.constant
+= argvec
[count
].size
.constant
;
2500 FUNCTION_ARG_ADVANCE (args_so_far
, outmode
, (tree
)0, 1);
/* Scan the remaining varargs, building one argvec entry per argument. */
2504 for (; count
< nargs
; count
++)
2506 rtx val
= va_arg (p
, rtx
);
2507 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
2509 /* We cannot convert the arg value to the mode the library wants here;
2510 must do it earlier where we know the signedness of the arg. */
2512 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
2515 /* On some machines, there's no way to pass a float to a library fcn.
2516 Pass it as a double instead. */
2517 #ifdef LIBGCC_NEEDS_DOUBLE
2518 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
2519 val
= convert_to_mode (DFmode
, val
, 0), mode
= DFmode
;
2522 /* There's no need to call protect_from_queue, because
2523 either emit_move_insn or emit_push_insn will do that. */
2525 /* Make sure it is a reasonable operand for a move or push insn. */
2526 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
2527 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
2528 val
= force_operand (val
, NULL_RTX
);
2530 argvec
[count
].value
= val
;
2531 argvec
[count
].mode
= mode
;
2533 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2534 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
2538 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
2539 if (argvec
[count
].reg
&& GET_CODE (argvec
[count
].reg
) == EXPR_LIST
)
2541 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2542 argvec
[count
].partial
2543 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
2545 argvec
[count
].partial
= 0;
2548 locate_and_pad_parm (mode
, NULL_TREE
,
2549 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
2550 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
2551 &argvec
[count
].size
);
2553 if (argvec
[count
].size
.var
)
2556 #ifndef REG_PARM_STACK_SPACE
2557 if (argvec
[count
].partial
)
2558 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
2561 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
2562 #ifdef REG_PARM_STACK_SPACE
2566 args_size
.constant
+= argvec
[count
].size
.constant
;
2568 #ifdef ACCUMULATE_OUTGOING_ARGS
2569 /* If this arg is actually passed on the stack, it might be
2570 clobbering something we already put there (this library call might
2571 be inside the evaluation of an argument to a function whose call
2572 requires the stack). This will only occur when the library call
2573 has sufficient args to run out of argument registers. Abort in
2574 this case; if this ever occurs, code must be added to save and
2575 restore the arg slot. */
2577 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0)
2581 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
)0, 1);
2585 /* If this machine requires an external definition for library
2586 functions, write one out. */
2587 assemble_external_libcall (fun
);
/* Round the total stack-argument size up to the stack boundary, and
   account for any register-parameter stack space the target reserves. */
2589 original_args_size
= args_size
;
2590 #ifdef STACK_BOUNDARY
2591 args_size
.constant
= (((args_size
.constant
+ (STACK_BYTES
- 1))
2592 / STACK_BYTES
) * STACK_BYTES
);
2595 #ifdef REG_PARM_STACK_SPACE
2596 args_size
.constant
= MAX (args_size
.constant
,
2597 REG_PARM_STACK_SPACE (NULL_TREE
));
2598 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2599 args_size
.constant
-= REG_PARM_STACK_SPACE (NULL_TREE
);
2603 #ifdef ACCUMULATE_OUTGOING_ARGS
2604 if (args_size
.constant
> current_function_outgoing_args_size
)
2605 current_function_outgoing_args_size
= args_size
.constant
;
2606 args_size
.constant
= 0;
2609 #ifndef PUSH_ROUNDING
2610 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
2613 #ifdef PUSH_ARGS_REVERSED
2614 #ifdef STACK_BOUNDARY
2615 /* If we push args individually in reverse order, perform stack alignment
2616 before the first push (the last arg). */
2618 anti_adjust_stack (GEN_INT (args_size
.constant
2619 - original_args_size
.constant
));
2623 #ifdef PUSH_ARGS_REVERSED
2631 /* Push the args that need to be pushed. */
2633 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
2635 register enum machine_mode mode
= argvec
[argnum
].mode
;
2636 register rtx val
= argvec
[argnum
].value
;
2637 rtx reg
= argvec
[argnum
].reg
;
2638 int partial
= argvec
[argnum
].partial
;
/* NOTE(review): this loop reads the argument via ARGNUM but the offset
   below uses COUNT; with PUSH_ARGS_REVERSED the two differ.  Looks like
   it should be argvec[argnum].offset -- verify against a later revision
   of this file.  */
2640 if (! (reg
!= 0 && partial
== 0))
2641 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, 0, partial
, reg
, 0,
2642 argblock
, GEN_INT (argvec
[count
].offset
.constant
));
2646 #ifndef PUSH_ARGS_REVERSED
2647 #ifdef STACK_BOUNDARY
2648 /* If we pushed args in forward order, perform stack alignment
2649 after pushing the last arg. */
2651 anti_adjust_stack (GEN_INT (args_size
.constant
2652 - original_args_size
.constant
));
2656 #ifdef PUSH_ARGS_REVERSED
2662 /* Now load any reg parms into their regs. */
2664 if (mem_value
!= 0 && struct_value_rtx
!= 0)
2665 emit_move_insn (struct_value_rtx
, XEXP (mem_value
, 0));
2667 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
2669 register enum machine_mode mode
= argvec
[argnum
].mode
;
2670 register rtx val
= argvec
[argnum
].value
;
2671 rtx reg
= argvec
[argnum
].reg
;
2672 int partial
= argvec
[argnum
].partial
;
2674 if (reg
!= 0 && partial
== 0)
2675 emit_move_insn (reg
, val
);
2680 /* For version 1.37, try deleting this entirely. */
2685 /* Any regs containing parms remain in use through the call. */
2687 for (count
= 0; count
< nargs
; count
++)
2688 if (argvec
[count
].reg
!= 0)
2689 emit_insn (gen_rtx (USE
, VOIDmode
, argvec
[count
].reg
));
2691 use_insns
= get_insns ();
2694 fun
= prepare_call_address (fun
, NULL_TREE
, &use_insns
);
2696 /* Don't allow popping to be deferred, since then
2697 cse'ing of library calls could delete a call and leave the pop. */
2700 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2701 will set inhibit_defer_pop to that value. */
2703 emit_call_1 (fun
, get_identifier (XSTR (orgfun
, 0)), args_size
.constant
, 0,
2704 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
2705 outmode
!= VOIDmode
? hard_libcall_value (outmode
) : NULL_RTX
,
2706 old_inhibit_defer_pop
+ 1, use_insns
, no_queue
);
2708 /* Now restore inhibit_defer_pop to its actual original value. */
2711 /* Copy the value to the right place. */
2712 if (outmode
!= VOIDmode
)
/* NOTE(review): the condition guarding line 2717 is elided in this
   extract; presumably it runs only when no VALUE slot was supplied. */
2717 value
= hard_libcall_value (outmode
);
2718 if (value
!= mem_value
)
2719 emit_move_insn (value
, mem_value
);
2721 else if (value
!= 0)
2722 emit_move_insn (value
, hard_libcall_value (outmode
));
2726 /* Expand an assignment that stores the value of FROM into TO.
2727 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2728 (This may contain a QUEUED rtx.)
2729 Otherwise, the returned value is not meaningful.
2731 SUGGEST_REG is no longer actually used.
2732 It used to mean, copy the value through a register
2733 and return that register, if that is possible.
2734 But now we do this if WANT_VALUE.
2736 If the value stored is a constant, we return the constant. */
/* NOTE(review): interior lines of this function are elided in this
   extract; locals such as RESULT and the bitfield variables are declared
   in elided lines.  */
2739 expand_assignment (to
, from
, want_value
, suggest_reg
)
2744 register rtx to_rtx
= 0;
2747 /* Don't crash if the lhs of the assignment was erroneous. */
2749 if (TREE_CODE (to
) == ERROR_MARK
)
2750 return expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2752 /* Assignment of a structure component needs special treatment
2753 if the structure component's rtx is not simply a MEM.
2754 Assignment of an array element at a constant index
2755 has the same problem. */
2757 if (TREE_CODE (to
) == COMPONENT_REF
2758 || TREE_CODE (to
) == BIT_FIELD_REF
2759 || (TREE_CODE (to
) == ARRAY_REF
2760 && TREE_CODE (TREE_OPERAND (to
, 1)) == INTEGER_CST
2761 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to
))) == INTEGER_CST
))
2763 enum machine_mode mode1
;
/* Decompose the reference: innermost object, field size/position,
   variable offset, access mode, signedness, and volatility. */
2769 tree tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
,
2770 &mode1
, &unsignedp
, &volatilep
);
2772 /* If we are going to use store_bit_field and extract_bit_field,
2773 make sure to_rtx will be safe for multiple use. */
2775 if (mode1
== VOIDmode
&& want_value
)
2776 tem
= stabilize_reference (tem
);
2778 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
2781 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
2783 if (GET_CODE (to_rtx
) != MEM
)
/* Fold the variable offset into the memory address. */
2785 to_rtx
= change_address (to_rtx
, VOIDmode
,
2786 gen_rtx (PLUS
, Pmode
, XEXP (to_rtx
, 0),
2787 force_reg (Pmode
, offset_rtx
)));
2791 if (GET_CODE (to_rtx
) == MEM
)
2792 MEM_VOLATILE_P (to_rtx
) = 1;
2793 #if 0 /* This was turned off because, when a field is volatile
2794 in an object which is not volatile, the object may be in a register,
2795 and then we would abort over here. */
2801 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
2803 /* Spurious cast makes HPUX compiler happy. */
2804 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
2807 /* Required alignment of containing datum. */
2808 TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
,
2809 int_size_in_bytes (TREE_TYPE (tem
)));
2810 preserve_temp_slots (result
);
2816 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2817 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2820 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2822 /* Don't move directly into a return register. */
2823 if (TREE_CODE (to
) == RESULT_DECL
&& GET_CODE (to_rtx
) == REG
)
2825 rtx temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
2826 emit_move_insn (to_rtx
, temp
);
2827 preserve_temp_slots (to_rtx
);
2832 /* In case we are returning the contents of an object which overlaps
2833 the place the value is being stored, use a safe function when copying
2834 a value through a pointer into a structure value return block. */
2835 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
2836 && current_function_returns_struct
2837 && !current_function_returns_pcc_struct
)
2839 rtx from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2840 rtx size
= expr_size (from
);
2842 #ifdef TARGET_MEM_FUNCTIONS
2843 emit_library_call (memcpy_libfunc
, 0,
2844 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
2845 XEXP (from_rtx
, 0), Pmode
,
2848 emit_library_call (bcopy_libfunc
, 0,
2849 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
2850 XEXP (to_rtx
, 0), Pmode
,
2854 preserve_temp_slots (to_rtx
);
2859 /* Compute FROM and store the value in the rtx we got. */
2861 result
= store_expr (from
, to_rtx
, want_value
);
2862 preserve_temp_slots (result
);
2867 /* Generate code for computing expression EXP,
2868 and storing the value into TARGET.
2869 Returns TARGET or an equivalent value.
2870 TARGET may contain a QUEUED rtx.
2872 If SUGGEST_REG is nonzero, copy the value through a register
2873 and return that register, if that is possible.
2875 If the value stored is a constant, we return the constant. */
/* NOTE(review): interior lines of this function are elided in this
   extract; locals such as TEMP, SIZE, COPY_SIZE and LABEL are declared in
   elided lines.  */
2878 store_expr (exp
, target
, suggest_reg
)
2880 register rtx target
;
2884 int dont_return_target
= 0;
2886 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
2888 /* Perform first part of compound expression, then assign from second
2890 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
2892 return store_expr (TREE_OPERAND (exp
, 1), target
, suggest_reg
);
2894 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
2896 /* For conditional expression, get safe form of the target. Then
2897 test the condition, doing the appropriate assignment on either
2898 side. This avoids the creation of unnecessary temporaries.
2899 For non-BLKmode, it is more efficient not to do this. */
2901 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
2904 target
= protect_from_queue (target
, 1);
2907 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
2908 store_expr (TREE_OPERAND (exp
, 1), target
, suggest_reg
);
2910 emit_jump_insn (gen_jump (lab2
));
2913 store_expr (TREE_OPERAND (exp
, 2), target
, suggest_reg
);
2919 else if (suggest_reg
&& GET_CODE (target
) == MEM
2920 && GET_MODE (target
) != BLKmode
)
2921 /* If target is in memory and caller wants value in a register instead,
2922 arrange that. Pass TARGET as target for expand_expr so that,
2923 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2924 We know expand_expr will not use the target in that case. */
2926 temp
= expand_expr (exp
, cse_not_expected
? NULL_RTX
: target
,
2927 GET_MODE (target
), 0);
2928 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
2929 temp
= copy_to_reg (temp
);
2930 dont_return_target
= 1;
2932 else if (queued_subexp_p (target
))
2933 /* If target contains a postincrement, it is not safe
2934 to use as the returned value. It would access the wrong
2935 place by the time the queued increment gets output.
2936 So copy the value through a temporary and use that temp
2939 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
2941 /* Expand EXP into a new pseudo. */
2942 temp
= gen_reg_rtx (GET_MODE (target
));
2943 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
2946 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
2947 dont_return_target
= 1;
2949 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
2950 /* If this is a scalar in a register that is stored in a wider mode
2951 than the declared mode, compute the result into its declared mode
2952 and then convert to the wider mode. Our value is the computed
2955 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
2956 convert_move (SUBREG_REG (target
), temp
,
2957 SUBREG_PROMOTED_UNSIGNED_P (target
));
2962 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
2963 /* DO return TARGET if it's a specified hardware register.
2964 expand_return relies on this. */
2965 if (!(target
&& GET_CODE (target
) == REG
2966 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
2967 && CONSTANT_P (temp
))
2968 dont_return_target
= 1;
2971 /* If value was not generated in the target, store it there.
2972 Convert the value to TARGET's type first if nec. */
2974 if (temp
!= target
&& TREE_CODE (exp
) != ERROR_MARK
)
2976 target
= protect_from_queue (target
, 1);
2977 if (GET_MODE (temp
) != GET_MODE (target
)
2978 && GET_MODE (temp
) != VOIDmode
)
2980 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
2981 if (dont_return_target
)
2983 /* In this case, we will return TEMP,
2984 so make sure it has the proper mode.
2985 But don't forget to store the value into TARGET. */
2986 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
2987 emit_move_insn (target
, temp
);
2990 convert_move (target
, temp
, unsignedp
);
2993 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
2995 /* Handle copying a string constant into an array.
2996 The string constant may be shorter than the array.
2997 So copy just the string's actual length, and clear the rest. */
3000 /* Get the size of the data type of the string,
3001 which is actually the size of the target. */
3002 size
= expr_size (exp
);
3003 if (GET_CODE (size
) == CONST_INT
3004 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3005 emit_block_move (target
, temp
, size
,
3006 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3009 /* Compute the size of the data to copy from the string. */
3011 = fold (build (MIN_EXPR
, sizetype
,
3012 size_binop (CEIL_DIV_EXPR
,
3013 TYPE_SIZE (TREE_TYPE (exp
)),
3014 size_int (BITS_PER_UNIT
)),
3016 build_int_2 (TREE_STRING_LENGTH (exp
), 0))));
3017 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3021 /* Copy that much. */
3022 emit_block_move (target
, temp
, copy_size_rtx
,
3023 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3025 /* Figure out how much is left in TARGET
3026 that we have to clear. */
3027 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3029 temp
= plus_constant (XEXP (target
, 0),
3030 TREE_STRING_LENGTH (exp
));
3031 size
= plus_constant (size
,
3032 - TREE_STRING_LENGTH (exp
));
3036 enum machine_mode size_mode
= Pmode
;
/* Copy size is not constant: compute the tail address and remaining
   size at run time, and skip the clear if the remainder is negative. */
3038 temp
= force_reg (Pmode
, XEXP (target
, 0));
3039 temp
= expand_binop (size_mode
, add_optab
, temp
,
3040 copy_size_rtx
, NULL_RTX
, 0,
3043 size
= expand_binop (size_mode
, sub_optab
, size
,
3044 copy_size_rtx
, NULL_RTX
, 0,
3047 emit_cmp_insn (size
, const0_rtx
, LT
, NULL_RTX
,
3048 GET_MODE (size
), 0, 0);
3049 label
= gen_label_rtx ();
3050 emit_jump_insn (gen_blt (label
));
3053 if (size
!= const0_rtx
)
3055 #ifdef TARGET_MEM_FUNCTIONS
3056 emit_library_call (memset_libfunc
, 0, VOIDmode
, 3,
3057 temp
, Pmode
, const0_rtx
, Pmode
, size
, Pmode
);
3059 emit_library_call (bzero_libfunc
, 0, VOIDmode
, 2,
3060 temp
, Pmode
, size
, Pmode
);
3067 else if (GET_MODE (temp
) == BLKmode
)
3068 emit_block_move (target
, temp
, expr_size (exp
),
3069 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3071 emit_move_insn (target
, temp
);
3073 if (dont_return_target
)
3078 /* Store the value of constructor EXP into the rtx TARGET.
3079 TARGET is either a REG or a MEM. */
/* NOTE(review): interior lines are elided in this extract; the locals
   ELT, I, BITSIZE, BITPOS, UNSIGNEDP are declared in elided lines.  */
3082 store_constructor (exp
, target
)
3086 tree type
= TREE_TYPE (exp
);
3088 /* We know our target cannot conflict, since safe_from_p has been called. */
3090 /* Don't try copying piece by piece into a hard register
3091 since that is vulnerable to being clobbered by EXP.
3092 Instead, construct in a pseudo register and then copy it all. */
3093 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3095 rtx temp
= gen_reg_rtx (GET_MODE (target
));
3096 store_constructor (exp
, temp
);
3097 emit_move_insn (target
, temp
);
3102 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
)
3106 /* Inform later passes that the whole union value is dead. */
3107 if (TREE_CODE (type
) == UNION_TYPE
)
3108 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3110 /* If we are building a static constructor into a register,
3111 set the initial value as zero so we can fold the value into
3113 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
))
3114 emit_move_insn (target
, const0_rtx
);
3116 /* If the constructor has fewer fields than the structure,
3117 clear the whole structure first. */
3118 else if (list_length (CONSTRUCTOR_ELTS (exp
))
3119 != list_length (TYPE_FIELDS (type
)))
3120 clear_storage (target
, int_size_in_bytes (type
));
3122 /* Inform later passes that the old value is dead. */
3123 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3125 /* Store each element of the constructor into
3126 the corresponding field of TARGET. */
3128 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3130 register tree field
= TREE_PURPOSE (elt
);
3131 register enum machine_mode mode
;
3136 /* Just ignore missing fields.
3137 We cleared the whole structure, above,
3138 if any fields are missing. */
3142 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
3143 unsignedp
= TREE_UNSIGNED (field
);
3144 mode
= DECL_MODE (field
);
3145 if (DECL_BIT_FIELD (field
))
3148 if (TREE_CODE (DECL_FIELD_BITPOS (field
)) != INTEGER_CST
)
3149 /* ??? This case remains to be written. */
3152 bitpos
= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field
));
3154 store_field (target
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
),
3155 /* The alignment of TARGET is
3156 at least what its type requires. */
3158 TYPE_ALIGN (type
) / BITS_PER_UNIT
,
3159 int_size_in_bytes (type
));
3162 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3166 tree domain
= TYPE_DOMAIN (type
);
3167 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
3168 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
3169 tree elttype
= TREE_TYPE (type
);
3171 /* If the constructor has fewer fields than the structure,
3172 clear the whole structure first. Similarly if this is
3173 static constructor of a non-BLKmode object. */
3175 if (list_length (CONSTRUCTOR_ELTS (exp
)) < maxelt
- minelt
+ 1
3176 || (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
3177 clear_storage (target
, maxelt
- minelt
+ 1);
3179 /* Inform later passes that the old value is dead. */
3180 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3182 /* Store each element of the constructor into
3183 the corresponding element of TARGET, determined
3184 by counting the elements. */
3185 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
3187 elt
= TREE_CHAIN (elt
), i
++)
3189 register enum machine_mode mode
;
3194 mode
= TYPE_MODE (elttype
);
3195 bitsize
= GET_MODE_BITSIZE (mode
);
3196 unsignedp
= TREE_UNSIGNED (elttype
);
3198 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3200 store_field (target
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
),
3201 /* The alignment of TARGET is
3202 at least what its type requires. */
3204 TYPE_ALIGN (type
) / BITS_PER_UNIT
,
3205 int_size_in_bytes (type
));
3213 /* Store the value of EXP (an expression tree)
3214 into a subfield of TARGET which has mode MODE and occupies
3215 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3216 If MODE is VOIDmode, it means that we are storing into a bit-field.
3218 If VALUE_MODE is VOIDmode, return nothing in particular.
3219 UNSIGNEDP is not used in this case.
3221 Otherwise, return an rtx for the value stored. This rtx
3222 has mode VALUE_MODE if that is convenient to do.
3223 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3225 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3226 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* NOTE(review): interior lines are elided in this extract (e.g. the
   condition at 3257 and the declaration of COUNT at 3303). */
3229 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
3230 unsignedp
, align
, total_size
)
3232 int bitsize
, bitpos
;
3233 enum machine_mode mode
;
3235 enum machine_mode value_mode
;
3240 HOST_WIDE_INT width_mask
= 0;
3242 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
3243 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
3245 /* If we are storing into an unaligned field of an aligned union that is
3246 in a register, we may have the mode of TARGET being an integer mode but
3247 MODE == BLKmode. In that case, get an aligned object whose size and
3248 alignment are the same as TARGET and store TARGET into it (we can avoid
3249 the store if the field being stored is the entire width of TARGET). Then
3250 call ourselves recursively to store the field into a BLKmode version of
3251 that object. Finally, load from the object into TARGET. This is not
3252 very efficient in general, but should only be slightly more expensive
3253 than the otherwise-required unaligned accesses. Perhaps this can be
3254 cleaned up later. */
3257 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
3259 rtx object
= assign_stack_temp (GET_MODE (target
),
3260 GET_MODE_SIZE (GET_MODE (target
)), 0);
3261 rtx blk_object
= copy_rtx (object
);
3263 PUT_MODE (blk_object
, BLKmode
);
3265 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
3266 emit_move_insn (object
, target
);
3268 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
3271 emit_move_insn (target
, object
);
3276 /* If the structure is in a register or if the component
3277 is a bit field, we cannot use addressing to access it.
3278 Use bit-field techniques or SUBREG to store in it. */
3280 if (mode
== VOIDmode
3281 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
3282 || GET_CODE (target
) == REG
3283 || GET_CODE (target
) == SUBREG
)
3285 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3286 /* Store the value in the bitfield. */
3287 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
3288 if (value_mode
!= VOIDmode
)
3290 /* The caller wants an rtx for the value. */
3291 /* If possible, avoid refetching from the bitfield itself. */
3293 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
3296 enum machine_mode tmode
;
/* For an unsigned value, just mask to the field width; for a signed
   value, shift left then arithmetic-shift right to sign-extend. */
3299 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
3300 tmode
= GET_MODE (temp
);
3301 if (tmode
== VOIDmode
)
3303 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
3304 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
3305 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
3307 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
3308 NULL_RTX
, value_mode
, 0, align
,
3315 rtx addr
= XEXP (target
, 0);
3318 /* If a value is wanted, it must be the lhs;
3319 so make the address stable for multiple use. */
3321 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
3322 && ! CONSTANT_ADDRESS_P (addr
)
3323 /* A frame-pointer reference is already stable. */
3324 && ! (GET_CODE (addr
) == PLUS
3325 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
3326 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
3327 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
3328 addr
= copy_to_reg (addr
);
3330 /* Now build a reference to just the desired component. */
3332 to_rtx
= change_address (target
, mode
,
3333 plus_constant (addr
, (bitpos
/ BITS_PER_UNIT
)));
3334 MEM_IN_STRUCT_P (to_rtx
) = 1;
3336 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
3340 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3341 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3342 ARRAY_REFs and find the ultimate containing object, which we return.
3344 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3345 bit position, and *PUNSIGNEDP to the signedness of the field.
3346 If the position of the field is variable, we store a tree
3347 giving the variable offset (in units) in *POFFSET.
3348 This offset is in addition to the bit position.
3349 If the position is not variable, we store 0 in *POFFSET.
3351 If any of the extraction expressions is volatile,
3352 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3354 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3355 is a mode that can be used to access the field. In that case, *PBITSIZE
3358 If the field describes a variable-sized object, *PMODE is set to
3359 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3360 this case, but the address of the object can be found. */
/* NOTE(review): interior lines are elided in this extract (loop braces,
   declarations of SIZE_TREE, CONSTANT, VAR, LOW_BOUND). */
3363 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
3364 punsignedp
, pvolatilep
)
3369 enum machine_mode
*pmode
;
3374 enum machine_mode mode
= VOIDmode
;
3375 tree offset
= integer_zero_node
;
/* First determine the size and signedness of the outermost reference. */
3377 if (TREE_CODE (exp
) == COMPONENT_REF
)
3379 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
3380 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
3381 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
3382 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
3384 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
3386 size_tree
= TREE_OPERAND (exp
, 1);
3387 *punsignedp
= TREE_UNSIGNED (exp
);
3391 mode
= TYPE_MODE (TREE_TYPE (exp
));
3392 *pbitsize
= GET_MODE_BITSIZE (mode
);
3393 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3398 if (TREE_CODE (size_tree
) != INTEGER_CST
)
3399 mode
= BLKmode
, *pbitsize
= -1;
3401 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
3404 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3405 and find the ultimate containing object. */
3411 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
3413 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
3414 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
3415 : TREE_OPERAND (exp
, 2));
/* Split a PLUS_EXPR position into its constant and variable parts;
   the constant accumulates into *PBITPOS, the variable into OFFSET. */
3417 if (TREE_CODE (pos
) == PLUS_EXPR
)
3420 if (TREE_CODE (TREE_OPERAND (pos
, 0)) == INTEGER_CST
)
3422 constant
= TREE_OPERAND (pos
, 0);
3423 var
= TREE_OPERAND (pos
, 1);
3425 else if (TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
3427 constant
= TREE_OPERAND (pos
, 1);
3428 var
= TREE_OPERAND (pos
, 0);
3433 *pbitpos
+= TREE_INT_CST_LOW (constant
);
3434 offset
= size_binop (PLUS_EXPR
, offset
,
3435 size_binop (FLOOR_DIV_EXPR
, var
,
3436 size_int (BITS_PER_UNIT
)));
3438 else if (TREE_CODE (pos
) == INTEGER_CST
)
3439 *pbitpos
+= TREE_INT_CST_LOW (pos
);
3442 /* Assume here that the offset is a multiple of a unit.
3443 If not, there should be an explicitly added constant. */
3444 offset
= size_binop (PLUS_EXPR
, offset
,
3445 size_binop (FLOOR_DIV_EXPR
, pos
,
3446 size_int (BITS_PER_UNIT
)));
3450 else if (TREE_CODE (exp
) == ARRAY_REF
)
3452 /* This code is based on the code in case ARRAY_REF in expand_expr
3453 below. We assume here that the size of an array element is
3454 always an integral multiple of BITS_PER_UNIT. */
3456 tree index
= TREE_OPERAND (exp
, 1);
3457 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
3459 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
3460 tree index_type
= TREE_TYPE (index
);
3462 if (! integer_zerop (low_bound
))
3463 index
= fold (build (MINUS_EXPR
, index_type
, index
, low_bound
));
3465 if (TYPE_PRECISION (index_type
) != POINTER_SIZE
)
3467 index
= convert (type_for_size (POINTER_SIZE
, 0), index
);
3468 index_type
= TREE_TYPE (index
);
3471 index
= fold (build (MULT_EXPR
, index_type
, index
,
3472 TYPE_SIZE (TREE_TYPE (exp
))));
3474 if (TREE_CODE (index
) == INTEGER_CST
3475 && TREE_INT_CST_HIGH (index
) == 0)
3476 *pbitpos
+= TREE_INT_CST_LOW (index
);
3478 offset
= size_binop (PLUS_EXPR
, offset
,
3479 size_binop (FLOOR_DIV_EXPR
, index
,
3480 size_int (BITS_PER_UNIT
)));
3482 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
3483 && ! ((TREE_CODE (exp
) == NOP_EXPR
3484 || TREE_CODE (exp
) == CONVERT_EXPR
)
3485 && (TYPE_MODE (TREE_TYPE (exp
))
3486 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
3489 /* If any reference in the chain is volatile, the effect is volatile. */
3490 if (TREE_THIS_VOLATILE (exp
))
3492 exp
= TREE_OPERAND (exp
, 0);
3495 /* If this was a bit-field, see if there is a mode that allows direct
3496 access in case EXP is in memory. */
3497 if (mode
== VOIDmode
&& *pbitpos
% *pbitsize
== 0)
3499 mode
= mode_for_size (*pbitsize
, MODE_INT
, 0);
3500 if (mode
== BLKmode
)
3504 if (integer_zerop (offset
))
3510 /* We aren't finished fixing the callers to really handle nonzero offset. */
3518 /* Given an rtx VALUE that may contain additions and multiplications,
3519 return an equivalent value that just refers to a register or memory.
3520 This is done by generating instructions to perform the arithmetic
3521 and returning a pseudo-register containing the value.
3523 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): interior lines are elided in this extract (declarations
   of TMP and OP2, and several else-arms). */
3526 force_operand (value
, target
)
3529 register optab binoptab
= 0;
3530 /* Use a temporary to force order of execution of calls to
3534 /* Use subtarget as the target for operand 0 of a binary operation. */
3535 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
/* Classify the top-level operation: addition, subtraction, or
   multiplication; each is expanded recursively on its operands. */
3537 if (GET_CODE (value
) == PLUS
)
3538 binoptab
= add_optab
;
3539 else if (GET_CODE (value
) == MINUS
)
3540 binoptab
= sub_optab
;
3541 else if (GET_CODE (value
) == MULT
)
3543 op2
= XEXP (value
, 1);
3544 if (!CONSTANT_P (op2
)
3545 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
3547 tmp
= force_operand (XEXP (value
, 0), subtarget
);
3548 return expand_mult (GET_MODE (value
), tmp
,
3549 force_operand (op2
, NULL_RTX
),
3555 op2
= XEXP (value
, 1);
3556 if (!CONSTANT_P (op2
)
3557 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
/* Turn subtraction of a constant into addition of its negation. */
3559 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
3561 binoptab
= add_optab
;
3562 op2
= negate_rtx (GET_MODE (value
), op2
);
3565 /* Check for an addition with OP2 a constant integer and our first
3566 operand a PLUS of a virtual register and something else. In that
3567 case, we want to emit the sum of the virtual register and the
3568 constant first and then add the other value. This allows virtual
3569 register instantiation to simply modify the constant rather than
3570 creating another one around this addition. */
3571 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
3572 && GET_CODE (XEXP (value
, 0)) == PLUS
3573 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
3574 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3575 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
3577 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
3578 XEXP (XEXP (value
, 0), 0), op2
,
3579 subtarget
, 0, OPTAB_LIB_WIDEN
);
3580 return expand_binop (GET_MODE (value
), binoptab
, temp
,
3581 force_operand (XEXP (XEXP (value
, 0), 1), 0),
3582 target
, 0, OPTAB_LIB_WIDEN
);
3585 tmp
= force_operand (XEXP (value
, 0), subtarget
);
3586 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
3587 force_operand (op2
, NULL_RTX
),
3588 target
, 0, OPTAB_LIB_WIDEN
);
3589 /* We give UNSIGNEDP = 0 to expand_binop
3590 because the only operations we are expanding here are signed ones. */
3595 /* Subroutine of expand_expr:
3596 save the non-copied parts (LIST) of an expr (LHS), and return a list
3597 which can restore these values to their previous values,
3598 should something modify their storage. */
/* NOTE(review): fragmented extraction below; original tokens (including the
   embedded original line numbers) are kept verbatim, comments only added.  */
3601 save_noncopied_parts (lhs
, list
)
/* Walk LIST; entries that are themselves TREE_LISTs are recursed into,
   other entries name parts (fields) of LHS that must be saved.  */
3608 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
3609 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
/* Sublist: save its parts too and chain the results onto PARTS.  */
3610 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
/* Otherwise TREE_VALUE (tail) is a field of LHS: build a COMPONENT_REF
   for it and allocate a stack temporary to hold its current value.  */
3613 tree part
= TREE_VALUE (tail
);
3614 tree part_type
= TREE_TYPE (part
);
3615 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
3616 rtx target
= assign_stack_temp (TYPE_MODE (part_type
),
3617 int_size_in_bytes (part_type
), 0);
/* Make sure the temporary's address is legitimate for this mode.  */
3618 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
3619 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
/* Pair the COMPONENT_REF (restore destination) with an RTL_EXPR that
   holds the saved copy; the pair is prepended to PARTS.  */
3620 parts
= tree_cons (to_be_saved
,
3621 build (RTL_EXPR
, part_type
, NULL_TREE
,
/* Emit the store of the current value into the temporary now.  */
3624 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
3629 /* Subroutine of expand_expr:
3630 record the non-copied parts (LIST) of an expr (LHS), and return a list
3631 which specifies the initial values of these parts. */
/* NOTE(review): fragmented extraction below; original tokens (including the
   embedded original line numbers) are kept verbatim, comments only added.  */
3634 init_noncopied_parts (lhs
, list
)
/* Walk LIST exactly as in save_noncopied_parts, recursing into nested
   TREE_LIST entries.  */
3641 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
3642 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
3643 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
/* For each part, pair its initial value (the TREE_PURPOSE of this list
   node) with a COMPONENT_REF naming the field of LHS to initialize.
   Unlike save_noncopied_parts, nothing is emitted here — only the
   association list is built.  */
3646 tree part
= TREE_VALUE (tail
);
3647 tree part_type
= TREE_TYPE (part
);
3648 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
3649 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
3654 /* Subroutine of expand_expr: return nonzero iff there is no way that
3655 EXP can reference X, which is being modified. */
/* NOTE(review): fragmented extraction below; original tokens (including the
   embedded original line numbers) are kept verbatim, comments only added.
   Several case labels and braces were lost in the extraction; hedged notes
   mark where the control structure must be confirmed against the original.  */
3658 safe_from_p (x
, exp
)
3668 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3669 find the underlying pseudo. */
3670 if (GET_CODE (x
) == SUBREG
)
3673 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3677 /* If X is a location in the outgoing argument area, it is always safe. */
3678 if (GET_CODE (x
) == MEM
3679 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
3680 || (GET_CODE (XEXP (x
, 0)) == PLUS
3681 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
/* Dispatch on the CLASS of EXP's tree code: declarations expose their
   DECL_RTL; lists and one- or two-operand expressions recurse over
   their operands.  */
3684 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
3687 exp_rtl
= DECL_RTL (exp
);
3694 if (TREE_CODE (exp
) == TREE_LIST
)
/* A TREE_LIST is safe iff both its value and the rest of the chain
   are safe (a null value or chain is trivially safe).  */
3695 return ((TREE_VALUE (exp
) == 0
3696 || safe_from_p (x
, TREE_VALUE (exp
)))
3697 && (TREE_CHAIN (exp
) == 0
3698 || safe_from_p (x
, TREE_CHAIN (exp
))));
/* Unary expression: safe iff its single operand is safe.  */
3703 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
/* Binary expression: safe iff both operands are safe.  */
3707 return (safe_from_p (x
, TREE_OPERAND (exp
, 0))
3708 && safe_from_p (x
, TREE_OPERAND (exp
, 1)));
3712 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3713 the expression. If it is set, we conflict iff we are that rtx or
3714 both are in memory. Otherwise, we check all operands of the
3715 expression recursively. */
3717 switch (TREE_CODE (exp
))
/* NOTE(review): the case label for this arm was lost in extraction;
   it appears to be ADDR_EXPR (safe iff the operand is static) —
   confirm against the original source.  */
3720 return staticp (TREE_OPERAND (exp
, 0));
3723 if (GET_CODE (x
) == MEM
)
3728 exp_rtl
= CALL_EXPR_RTL (exp
);
3731 /* Assume that the call will clobber all hard registers and
3733 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3734 || GET_CODE (x
) == MEM
)
3741 exp_rtl
= RTL_EXPR_RTL (exp
);
3743 /* We don't know what this can modify. */
3748 case WITH_CLEANUP_EXPR
:
3749 exp_rtl
= RTL_EXPR_RTL (exp
);
3753 exp_rtl
= SAVE_EXPR_RTL (exp
);
3757 /* The only operand we look at is operand 1. The rest aren't
3758 part of the expression. */
3759 return safe_from_p (x
, TREE_OPERAND (exp
, 1));
3761 case METHOD_CALL_EXPR
:
3762 /* This takes a rtx argument, but shouldn't appear here. */
3766 /* If we have an rtx, we do not need to scan our operands. */
/* Fall-through: no EXP_RTL was found, so recursively check every
   operand of EXP.  */
3770 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
3771 for (i
= 0; i
< nops
; i
++)
3772 if (TREE_OPERAND (exp
, i
) != 0
3773 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
)))
3777 /* If we have an rtl, find any enclosed object. Then see if we conflict
/* Strip a SUBREG to reach the underlying register.  */
3781 if (GET_CODE (exp_rtl
) == SUBREG
)
3783 exp_rtl
= SUBREG_REG (exp_rtl
);
/* A hard register in EXP is conservatively treated as a conflict.  */
3784 if (GET_CODE (exp_rtl
) == REG
3785 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
3789 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3790 are memory and EXP is not readonly. */
3791 return ! (rtx_equal_p (x
, exp_rtl
)
3792 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
3793 && ! TREE_READONLY (exp
)));
3796 /* If we reach here, it is safe. */
3796 /* If we reach here, it is safe. */
3800 /* Subroutine of expand_expr: return nonzero iff EXP is an
3801 expression whose type is statically determinable. */
/* NOTE(review): fragmented extraction below; the function's name/parameter
   lines were lost (presumably a predicate such as fixed_type_p — confirm
   against the original).  Original tokens kept verbatim, comments only added.
   The predicate is true for tree codes that name objects or references
   whose type is fixed at compile time.  */
3807 if (TREE_CODE (exp
) == PARM_DECL
3808 || TREE_CODE (exp
) == VAR_DECL
3809 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
3810 || TREE_CODE (exp
) == COMPONENT_REF
3811 || TREE_CODE (exp
) == ARRAY_REF
)
3816 /* expand_expr: generate code for computing expression EXP.
3817 An rtx for the computed value is returned. The value is never null.
3818 In the case of a void EXP, const0_rtx is returned.
3820 The value may be stored in TARGET if TARGET is nonzero.
3821 TARGET is just a suggestion; callers must assume that
3822 the rtx returned may not be the same as TARGET.
3824 If TARGET is CONST0_RTX, it means that the value will be ignored.
3826 If TMODE is not VOIDmode, it suggests generating the
3827 result in mode TMODE. But this is done only when convenient.
3828 Otherwise, TMODE is ignored and the value generated in its natural mode.
3829 TMODE is just a suggestion; callers must assume that
3830 the rtx returned may not have mode TMODE.
3832 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3833 with a constant address even if that address is not normally legitimate.
3834 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3836 If MODIFIER is EXPAND_SUM then when EXP is an addition
3837 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3838 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3839 products as above, or REG or MEM, or constant.
3840 Ordinarily in such cases we would output mul or add instructions
3841 and then return a pseudo reg containing the sum.
3843 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3844 it also marks a label as absolutely required (it can't be dead).
3845 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3846 This is used for outputting expressions used in initializers. */
3849 expand_expr (exp
, target
, tmode
, modifier
)
3852 enum machine_mode tmode
;
3853 enum expand_modifier modifier
;
3855 register rtx op0
, op1
, temp
;
3856 tree type
= TREE_TYPE (exp
);
3857 int unsignedp
= TREE_UNSIGNED (type
);
3858 register enum machine_mode mode
= TYPE_MODE (type
);
3859 register enum tree_code code
= TREE_CODE (exp
);
3861 /* Use subtarget as the target for operand 0 of a binary operation. */
3862 rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
3863 rtx original_target
= target
;
3864 int ignore
= target
== const0_rtx
;
3867 /* Don't use hard regs as subtargets, because the combiner
3868 can only handle pseudo regs. */
3869 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
3871 /* Avoid subtargets inside loops,
3872 since they hide some invariant expressions. */
3873 if (preserve_subexpressions_p ())
3876 if (ignore
) target
= 0, original_target
= 0;
3878 /* If will do cse, generate all results into pseudo registers
3879 since 1) that allows cse to find more things
3880 and 2) otherwise cse could produce an insn the machine
3883 if (! cse_not_expected
&& mode
!= BLKmode
&& target
3884 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3887 /* Ensure we reference a volatile object even if value is ignored. */
3888 if (ignore
&& TREE_THIS_VOLATILE (exp
)
3889 && TREE_CODE (exp
) != FUNCTION_DECL
3890 && mode
!= VOIDmode
&& mode
!= BLKmode
)
3892 target
= gen_reg_rtx (mode
);
3893 temp
= expand_expr (exp
, target
, VOIDmode
, modifier
);
3895 emit_move_insn (target
, temp
);
3903 tree function
= decl_function_context (exp
);
3904 /* Handle using a label in a containing function. */
3905 if (function
!= current_function_decl
&& function
!= 0)
3907 struct function
*p
= find_function_data (function
);
3908 /* Allocate in the memory associated with the function
3909 that the label is in. */
3910 push_obstacks (p
->function_obstack
,
3911 p
->function_maybepermanent_obstack
);
3913 p
->forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
3914 label_rtx (exp
), p
->forced_labels
);
3917 else if (modifier
== EXPAND_INITIALIZER
)
3918 forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
3919 label_rtx (exp
), forced_labels
);
3920 temp
= gen_rtx (MEM
, FUNCTION_MODE
,
3921 gen_rtx (LABEL_REF
, Pmode
, label_rtx (exp
)));
3922 if (function
!= current_function_decl
&& function
!= 0)
3923 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
3928 if (DECL_RTL (exp
) == 0)
3930 error_with_decl (exp
, "prior parameter's size depends on `%s'");
3931 return CONST0_RTX (mode
);
3937 if (DECL_RTL (exp
) == 0)
3939 /* Ensure variable marked as used
3940 even if it doesn't go through a parser. */
3941 TREE_USED (exp
) = 1;
3942 /* Handle variables inherited from containing functions. */
3943 context
= decl_function_context (exp
);
3945 /* We treat inline_function_decl as an alias for the current function
3946 because that is the inline function whose vars, types, etc.
3947 are being merged into the current function.
3948 See expand_inline_function. */
3949 if (context
!= 0 && context
!= current_function_decl
3950 && context
!= inline_function_decl
3951 /* If var is static, we don't need a static chain to access it. */
3952 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
3953 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
3957 /* Mark as non-local and addressable. */
3958 DECL_NONLOCAL (exp
) = 1;
3959 mark_addressable (exp
);
3960 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
3962 addr
= XEXP (DECL_RTL (exp
), 0);
3963 if (GET_CODE (addr
) == MEM
)
3964 addr
= gen_rtx (MEM
, Pmode
, fix_lexical_addr (XEXP (addr
, 0), exp
));
3966 addr
= fix_lexical_addr (addr
, exp
);
3967 return change_address (DECL_RTL (exp
), mode
, addr
);
3970 /* This is the case of an array whose size is to be determined
3971 from its initializer, while the initializer is still being parsed.
3973 if (GET_CODE (DECL_RTL (exp
)) == MEM
3974 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
3975 return change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
3976 XEXP (DECL_RTL (exp
), 0));
3977 if (GET_CODE (DECL_RTL (exp
)) == MEM
3978 && modifier
!= EXPAND_CONST_ADDRESS
3979 && modifier
!= EXPAND_SUM
3980 && modifier
!= EXPAND_INITIALIZER
)
3982 /* DECL_RTL probably contains a constant address.
3983 On RISC machines where a constant address isn't valid,
3984 make some insns to get that address into a register. */
3985 if (!memory_address_p (DECL_MODE (exp
), XEXP (DECL_RTL (exp
), 0))
3987 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp
), 0))))
3988 return change_address (DECL_RTL (exp
), VOIDmode
,
3989 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
3992 /* If the mode of DECL_RTL does not match that of the decl, it
3993 must be a promoted value. We return a SUBREG of the wanted mode,
3994 but mark it so that we know that it was already extended. */
3996 if (GET_CODE (DECL_RTL (exp
)) == REG
3997 && GET_MODE (DECL_RTL (exp
)) != mode
)
3999 enum machine_mode decl_mode
= DECL_MODE (exp
);
4001 /* Get the signedness used for this variable. Ensure we get the
4002 same mode we got when the variable was declared. */
4004 PROMOTE_MODE (decl_mode
, unsignedp
, type
);
4006 if (decl_mode
!= GET_MODE (DECL_RTL (exp
)))
4009 temp
= gen_rtx (SUBREG
, mode
, DECL_RTL (exp
), 0);
4010 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4011 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4015 return DECL_RTL (exp
);
4018 return immed_double_const (TREE_INT_CST_LOW (exp
),
4019 TREE_INT_CST_HIGH (exp
),
4023 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
4026 /* If optimized, generate immediate CONST_DOUBLE
4027 which will be turned into memory by reload if necessary.
4029 We used to force a register so that loop.c could see it. But
4030 this does not allow gen_* patterns to perform optimizations with
4031 the constants. It also produces two insns in cases like "x = 1.0;".
4032 On most machines, floating-point constants are not permitted in
4033 many insns, so we'd end up copying it to a register in any case.
4035 Now, we do the copying in expand_binop, if appropriate. */
4036 return immed_real_const (exp
);
4040 if (! TREE_CST_RTL (exp
))
4041 output_constant_def (exp
);
4043 /* TREE_CST_RTL probably contains a constant address.
4044 On RISC machines where a constant address isn't valid,
4045 make some insns to get that address into a register. */
4046 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
4047 && modifier
!= EXPAND_CONST_ADDRESS
4048 && modifier
!= EXPAND_INITIALIZER
4049 && modifier
!= EXPAND_SUM
4050 && !memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0)))
4051 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
4052 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
4053 return TREE_CST_RTL (exp
);
4056 context
= decl_function_context (exp
);
4057 /* We treat inline_function_decl as an alias for the current function
4058 because that is the inline function whose vars, types, etc.
4059 are being merged into the current function.
4060 See expand_inline_function. */
4061 if (context
== current_function_decl
|| context
== inline_function_decl
)
4064 /* If this is non-local, handle it. */
4067 temp
= SAVE_EXPR_RTL (exp
);
4068 if (temp
&& GET_CODE (temp
) == REG
)
4070 put_var_into_stack (exp
);
4071 temp
= SAVE_EXPR_RTL (exp
);
4073 if (temp
== 0 || GET_CODE (temp
) != MEM
)
4075 return change_address (temp
, mode
,
4076 fix_lexical_addr (XEXP (temp
, 0), exp
));
4078 if (SAVE_EXPR_RTL (exp
) == 0)
4080 if (mode
== BLKmode
)
4082 = assign_stack_temp (mode
,
4083 int_size_in_bytes (TREE_TYPE (exp
)), 0);
4086 enum machine_mode var_mode
= mode
;
4088 if (TREE_CODE (type
) == INTEGER_TYPE
4089 || TREE_CODE (type
) == ENUMERAL_TYPE
4090 || TREE_CODE (type
) == BOOLEAN_TYPE
4091 || TREE_CODE (type
) == CHAR_TYPE
4092 || TREE_CODE (type
) == REAL_TYPE
4093 || TREE_CODE (type
) == POINTER_TYPE
4094 || TREE_CODE (type
) == OFFSET_TYPE
)
4096 PROMOTE_MODE (var_mode
, unsignedp
, type
);
4099 temp
= gen_reg_rtx (var_mode
);
4102 SAVE_EXPR_RTL (exp
) = temp
;
4103 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
4104 if (!optimize
&& GET_CODE (temp
) == REG
)
4105 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, temp
,
4109 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4110 must be a promoted value. We return a SUBREG of the wanted mode,
4111 but mark it so that we know that it was already extended. Note
4112 that `unsignedp' was modified above in this case. */
4114 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
4115 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
4117 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
4118 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4119 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4123 return SAVE_EXPR_RTL (exp
);
4126 /* Exit the current loop if the body-expression is true. */
4128 rtx label
= gen_label_rtx ();
4129 do_jump (TREE_OPERAND (exp
, 0), label
, NULL_RTX
);
4130 expand_exit_loop (NULL_PTR
);
4136 expand_start_loop (1);
4137 expand_expr_stmt (TREE_OPERAND (exp
, 0));
4144 tree vars
= TREE_OPERAND (exp
, 0);
4145 int vars_need_expansion
= 0;
4147 /* Need to open a binding contour here because
4148 if there are any cleanups they most be contained here. */
4149 expand_start_bindings (0);
4151 /* Mark the corresponding BLOCK for output in its proper place. */
4152 if (TREE_OPERAND (exp
, 2) != 0
4153 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
4154 insert_block (TREE_OPERAND (exp
, 2));
4156 /* If VARS have not yet been expanded, expand them now. */
4159 if (DECL_RTL (vars
) == 0)
4161 vars_need_expansion
= 1;
4164 expand_decl_init (vars
);
4165 vars
= TREE_CHAIN (vars
);
4168 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
4170 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
4176 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
4178 emit_insns (RTL_EXPR_SEQUENCE (exp
));
4179 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
4180 return RTL_EXPR_RTL (exp
);
4183 /* All elts simple constants => refer to a constant in memory. But
4184 if this is a non-BLKmode mode, let it store a field at a time
4185 since that should make a CONST_INT or CONST_DOUBLE when we
4187 if (TREE_STATIC (exp
) && (mode
== BLKmode
|| TREE_ADDRESSABLE (exp
)))
4189 rtx constructor
= output_constant_def (exp
);
4190 if (modifier
!= EXPAND_CONST_ADDRESS
4191 && modifier
!= EXPAND_INITIALIZER
4192 && modifier
!= EXPAND_SUM
4193 && !memory_address_p (GET_MODE (constructor
),
4194 XEXP (constructor
, 0)))
4195 constructor
= change_address (constructor
, VOIDmode
,
4196 XEXP (constructor
, 0));
4203 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4204 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
4209 if (target
== 0 || ! safe_from_p (target
, exp
))
4211 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
4212 target
= gen_reg_rtx (mode
);
4215 enum tree_code c
= TREE_CODE (type
);
4217 = assign_stack_temp (mode
, int_size_in_bytes (type
), 0);
4218 if (c
== RECORD_TYPE
|| c
== UNION_TYPE
|| c
== ARRAY_TYPE
)
4219 MEM_IN_STRUCT_P (target
) = 1;
4222 store_constructor (exp
, target
);
4228 tree exp1
= TREE_OPERAND (exp
, 0);
4231 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4232 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4233 This code has the same general effect as simply doing
4234 expand_expr on the save expr, except that the expression PTR
4235 is computed for use as a memory address. This means different
4236 code, suitable for indexing, may be generated. */
4237 if (TREE_CODE (exp1
) == SAVE_EXPR
4238 && SAVE_EXPR_RTL (exp1
) == 0
4239 && TREE_CODE (exp2
= TREE_OPERAND (exp1
, 0)) != ERROR_MARK
4240 && TYPE_MODE (TREE_TYPE (exp1
)) == Pmode
4241 && TYPE_MODE (TREE_TYPE (exp2
)) == Pmode
)
4243 temp
= expand_expr (TREE_OPERAND (exp1
, 0), NULL_RTX
,
4244 VOIDmode
, EXPAND_SUM
);
4245 op0
= memory_address (mode
, temp
);
4246 op0
= copy_all_regs (op0
);
4247 SAVE_EXPR_RTL (exp1
) = op0
;
4251 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4252 op0
= memory_address (mode
, op0
);
4255 temp
= gen_rtx (MEM
, mode
, op0
);
4256 /* If address was computed by addition,
4257 mark this as an element of an aggregate. */
4258 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
4259 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
4260 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == PLUS_EXPR
)
4261 || TREE_CODE (TREE_TYPE (exp
)) == ARRAY_TYPE
4262 || TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
4263 || TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
4264 || (TREE_CODE (exp1
) == ADDR_EXPR
4265 && (exp2
= TREE_OPERAND (exp1
, 0))
4266 && (TREE_CODE (TREE_TYPE (exp2
)) == ARRAY_TYPE
4267 || TREE_CODE (TREE_TYPE (exp2
)) == RECORD_TYPE
4268 || TREE_CODE (TREE_TYPE (exp2
)) == UNION_TYPE
)))
4269 MEM_IN_STRUCT_P (temp
) = 1;
4270 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
);
4271 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4272 a location is accessed through a pointer to const does not mean
4273 that the value there can never change. */
4274 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
);
4280 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
4284 tree array
= TREE_OPERAND (exp
, 0);
4285 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
4286 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4287 tree index
= TREE_OPERAND (exp
, 1);
4288 tree index_type
= TREE_TYPE (index
);
4291 /* Optimize the special-case of a zero lower bound. */
4292 if (! integer_zerop (low_bound
))
4293 index
= fold (build (MINUS_EXPR
, index_type
, index
, low_bound
));
4295 if (TREE_CODE (index
) != INTEGER_CST
4296 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4298 /* Nonconstant array index or nonconstant element size.
4299 Generate the tree for *(&array+index) and expand that,
4300 except do it in a language-independent way
4301 and don't complain about non-lvalue arrays.
4302 `mark_addressable' should already have been called
4303 for any array for which this case will be reached. */
4305 /* Don't forget the const or volatile flag from the array
4307 tree variant_type
= build_type_variant (type
,
4308 TREE_READONLY (exp
),
4309 TREE_THIS_VOLATILE (exp
));
4310 tree array_adr
= build1 (ADDR_EXPR
,
4311 build_pointer_type (variant_type
), array
);
4314 /* Convert the integer argument to a type the same size as a
4315 pointer so the multiply won't overflow spuriously. */
4316 if (TYPE_PRECISION (index_type
) != POINTER_SIZE
)
4317 index
= convert (type_for_size (POINTER_SIZE
, 0), index
);
4319 /* Don't think the address has side effects
4320 just because the array does.
4321 (In some cases the address might have side effects,
4322 and we fail to record that fact here. However, it should not
4323 matter, since expand_expr should not care.) */
4324 TREE_SIDE_EFFECTS (array_adr
) = 0;
4326 elt
= build1 (INDIRECT_REF
, type
,
4327 fold (build (PLUS_EXPR
,
4328 TYPE_POINTER_TO (variant_type
),
4330 fold (build (MULT_EXPR
,
4331 TYPE_POINTER_TO (variant_type
),
4333 size_in_bytes (type
))))));
4335 /* Volatility, etc., of new expression is same as old
4337 TREE_SIDE_EFFECTS (elt
) = TREE_SIDE_EFFECTS (exp
);
4338 TREE_THIS_VOLATILE (elt
) = TREE_THIS_VOLATILE (exp
);
4339 TREE_READONLY (elt
) = TREE_READONLY (exp
);
4341 return expand_expr (elt
, target
, tmode
, modifier
);
4344 /* Fold an expression like: "foo"[2].
4345 This is not done in fold so it won't happen inside &. */
4347 if (TREE_CODE (array
) == STRING_CST
4348 && TREE_CODE (index
) == INTEGER_CST
4349 && !TREE_INT_CST_HIGH (index
)
4350 && (i
= TREE_INT_CST_LOW (index
)) < TREE_STRING_LENGTH (array
))
4352 if (TREE_TYPE (TREE_TYPE (array
)) == integer_type_node
)
4354 exp
= build_int_2 (((int *)TREE_STRING_POINTER (array
))[i
], 0);
4355 TREE_TYPE (exp
) = integer_type_node
;
4356 return expand_expr (exp
, target
, tmode
, modifier
);
4358 if (TREE_TYPE (TREE_TYPE (array
)) == char_type_node
)
4360 exp
= build_int_2 (TREE_STRING_POINTER (array
)[i
], 0);
4361 TREE_TYPE (exp
) = integer_type_node
;
4362 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array
)),
4364 target
, tmode
, modifier
);
4368 /* If this is a constant index into a constant array,
4369 just get the value from the array. Handle both the cases when
4370 we have an explicit constructor and when our operand is a variable
4371 that was declared const. */
4373 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
4375 if (TREE_CODE (index
) == INTEGER_CST
4376 && TREE_INT_CST_HIGH (index
) == 0)
4378 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
4380 i
= TREE_INT_CST_LOW (index
);
4382 elem
= TREE_CHAIN (elem
);
4384 return expand_expr (fold (TREE_VALUE (elem
)), target
,
4389 else if (optimize
>= 1
4390 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
4391 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
4392 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
4394 if (TREE_CODE (index
) == INTEGER_CST
4395 && TREE_INT_CST_HIGH (index
) == 0)
4397 tree init
= DECL_INITIAL (array
);
4399 i
= TREE_INT_CST_LOW (index
);
4400 if (TREE_CODE (init
) == CONSTRUCTOR
)
4402 tree elem
= CONSTRUCTOR_ELTS (init
);
4405 elem
= TREE_CHAIN (elem
);
4407 return expand_expr (fold (TREE_VALUE (elem
)), target
,
4410 else if (TREE_CODE (init
) == STRING_CST
4411 && i
< TREE_STRING_LENGTH (init
))
4413 temp
= GEN_INT (TREE_STRING_POINTER (init
)[i
]);
4414 return convert_to_mode (mode
, temp
, 0);
4420 /* Treat array-ref with constant index as a component-ref. */
4424 /* If the operand is a CONSTRUCTOR, we can just extract the
4425 appropriate field if it is present. */
4426 if (code
!= ARRAY_REF
4427 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
4431 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
4432 elt
= TREE_CHAIN (elt
))
4433 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
4434 return expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
4438 enum machine_mode mode1
;
4443 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
4444 &mode1
, &unsignedp
, &volatilep
);
4446 /* In some cases, we will be offsetting OP0's address by a constant.
4447 So get it as a sum, if possible. If we will be using it
4448 directly in an insn, we validate it. */
4449 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4451 /* If this is a constant, put it into a register if it is a
4452 legitimate constant and memory if it isn't. */
4453 if (CONSTANT_P (op0
))
4455 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
4456 if (LEGITIMATE_CONSTANT_P (op0
))
4457 op0
= force_reg (mode
, op0
);
4459 op0
= validize_mem (force_const_mem (mode
, op0
));
4464 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4466 if (GET_CODE (op0
) != MEM
)
4468 op0
= change_address (op0
, VOIDmode
,
4469 gen_rtx (PLUS
, Pmode
, XEXP (op0
, 0),
4470 force_reg (Pmode
, offset_rtx
)));
4473 /* Don't forget about volatility even if this is a bitfield. */
4474 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
4476 op0
= copy_rtx (op0
);
4477 MEM_VOLATILE_P (op0
) = 1;
4480 if (mode1
== VOIDmode
4481 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
4482 && modifier
!= EXPAND_CONST_ADDRESS
4483 && modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
4484 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
)
4486 /* In cases where an aligned union has an unaligned object
4487 as a field, we might be extracting a BLKmode value from
4488 an integer-mode (e.g., SImode) object. Handle this case
4489 by doing the extract into an object as wide as the field
4490 (which we know to be the width of a basic mode), then
4491 storing into memory, and changing the mode to BLKmode. */
4492 enum machine_mode ext_mode
= mode
;
4494 if (ext_mode
== BLKmode
)
4495 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
4497 if (ext_mode
== BLKmode
)
4500 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
4501 unsignedp
, target
, ext_mode
, ext_mode
,
4502 TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
,
4503 int_size_in_bytes (TREE_TYPE (tem
)));
4504 if (mode
== BLKmode
)
4506 rtx
new = assign_stack_temp (ext_mode
,
4507 bitsize
/ BITS_PER_UNIT
, 0);
4509 emit_move_insn (new, op0
);
4510 op0
= copy_rtx (new);
4511 PUT_MODE (op0
, BLKmode
);
4517 /* Get a reference to just this component. */
4518 if (modifier
== EXPAND_CONST_ADDRESS
4519 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
4520 op0
= gen_rtx (MEM
, mode1
, plus_constant (XEXP (op0
, 0),
4521 (bitpos
/ BITS_PER_UNIT
)));
4523 op0
= change_address (op0
, mode1
,
4524 plus_constant (XEXP (op0
, 0),
4525 (bitpos
/ BITS_PER_UNIT
)));
4526 MEM_IN_STRUCT_P (op0
) = 1;
4527 MEM_VOLATILE_P (op0
) |= volatilep
;
4528 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
)
4531 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
4532 convert_move (target
, op0
, unsignedp
);
4538 tree base
= build_unary_op (ADDR_EXPR
, TREE_OPERAND (exp
, 0), 0);
4539 tree addr
= build (PLUS_EXPR
, type
, base
, TREE_OPERAND (exp
, 1));
4540 op0
= expand_expr (addr
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4541 temp
= gen_rtx (MEM
, mode
, memory_address (mode
, op0
));
4542 MEM_IN_STRUCT_P (temp
) = 1;
4543 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
);
4544 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4545 a location is accessed through a pointer to const does not mean
4546 that the value there can never change. */
4547 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
);
4552 /* Intended for a reference to a buffer of a file-object in Pascal.
4553 But it's not certain that a special tree code will really be
4554 necessary for these. INDIRECT_REF might work for them. */
4558 /* IN_EXPR: Inlined pascal set IN expression.
4561 rlo = set_low - (set_low%bits_per_word);
4562 the_word = set [ (index - rlo)/bits_per_word ];
4563 bit_index = index % bits_per_word;
4564 bitmask = 1 << bit_index;
4565 return !!(the_word & bitmask); */
4567 preexpand_calls (exp
);
4569 tree set
= TREE_OPERAND (exp
, 0);
4570 tree index
= TREE_OPERAND (exp
, 1);
4571 tree set_type
= TREE_TYPE (set
);
4573 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
4574 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
4580 rtx diff
, quo
, rem
, addr
, bit
, result
;
4581 rtx setval
, setaddr
;
4582 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
4585 target
= gen_reg_rtx (mode
);
4587 /* If domain is empty, answer is no. */
4588 if (tree_int_cst_lt (set_high_bound
, set_low_bound
))
4591 index_val
= expand_expr (index
, 0, VOIDmode
, 0);
4592 lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
4593 hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
4594 setval
= expand_expr (set
, 0, VOIDmode
, 0);
4595 setaddr
= XEXP (setval
, 0);
4597 /* Compare index against bounds, if they are constant. */
4598 if (GET_CODE (index_val
) == CONST_INT
4599 && GET_CODE (lo_r
) == CONST_INT
4600 && INTVAL (index_val
) < INTVAL (lo_r
))
4603 if (GET_CODE (index_val
) == CONST_INT
4604 && GET_CODE (hi_r
) == CONST_INT
4605 && INTVAL (hi_r
) < INTVAL (index_val
))
4608 /* If we get here, we have to generate the code for both cases
4609 (in range and out of range). */
4611 op0
= gen_label_rtx ();
4612 op1
= gen_label_rtx ();
4614 if (! (GET_CODE (index_val
) == CONST_INT
4615 && GET_CODE (lo_r
) == CONST_INT
))
4617 emit_cmp_insn (index_val
, lo_r
, LT
, NULL_RTX
,
4618 GET_MODE (index_val
), 0, 0);
4619 emit_jump_insn (gen_blt (op1
));
4622 if (! (GET_CODE (index_val
) == CONST_INT
4623 && GET_CODE (hi_r
) == CONST_INT
))
4625 emit_cmp_insn (index_val
, hi_r
, GT
, NULL_RTX
,
4626 GET_MODE (index_val
), 0, 0);
4627 emit_jump_insn (gen_bgt (op1
));
4630 /* Calculate the element number of bit zero in the first word
4632 if (GET_CODE (lo_r
) == CONST_INT
)
4633 rlow
= GEN_INT (INTVAL (lo_r
)
4634 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
4636 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
4637 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
4638 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4640 diff
= expand_binop (index_mode
, sub_optab
,
4641 index_val
, rlow
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4643 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
4644 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, 0);
4645 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
4646 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, 0);
4647 addr
= memory_address (byte_mode
,
4648 expand_binop (index_mode
, add_optab
,
4649 diff
, setaddr
, NULL_RTX
, 0,
4651 /* Extract the bit we want to examine */
4652 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
4653 gen_rtx (MEM
, byte_mode
, addr
),
4654 make_tree (TREE_TYPE (index
), rem
),
4656 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
4657 GET_MODE (target
) == byte_mode
? target
: 0,
4658 1, OPTAB_LIB_WIDEN
);
4660 if (result
!= target
)
4661 convert_move (target
, result
, 1);
4663 /* Output the code to handle the out-of-range case. */
4666 emit_move_insn (target
, const0_rtx
);
4671 case WITH_CLEANUP_EXPR
:
4672 if (RTL_EXPR_RTL (exp
) == 0)
4675 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
4677 = tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2), cleanups_this_call
);
4678 /* That's it for this cleanup. */
4679 TREE_OPERAND (exp
, 2) = 0;
4681 return RTL_EXPR_RTL (exp
);
4684 /* Check for a built-in function. */
4685 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
4686 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == FUNCTION_DECL
4687 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
4688 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
4689 /* If this call was expanded already by preexpand_calls,
4690 just return the result we got. */
4691 if (CALL_EXPR_RTL (exp
) != 0)
4692 return CALL_EXPR_RTL (exp
);
4693 return expand_call (exp
, target
, ignore
);
4695 case NON_LVALUE_EXPR
:
4698 case REFERENCE_EXPR
:
4699 if (TREE_CODE (type
) == VOID_TYPE
|| ignore
)
4701 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
4704 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4705 return expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, modifier
);
4706 if (TREE_CODE (type
) == UNION_TYPE
)
4708 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
4711 if (mode
== BLKmode
)
4713 if (TYPE_SIZE (type
) == 0
4714 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4716 target
= assign_stack_temp (BLKmode
,
4717 (TREE_INT_CST_LOW (TYPE_SIZE (type
))
4718 + BITS_PER_UNIT
- 1)
4719 / BITS_PER_UNIT
, 0);
4722 target
= gen_reg_rtx (mode
);
4724 if (GET_CODE (target
) == MEM
)
4725 /* Store data into beginning of memory target. */
4726 store_expr (TREE_OPERAND (exp
, 0),
4727 change_address (target
, TYPE_MODE (valtype
), 0), 0);
4729 else if (GET_CODE (target
) == REG
)
4730 /* Store this field into a union of the proper type. */
4731 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
4732 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
4734 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4738 /* Return the entire union. */
4741 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
4742 if (GET_MODE (op0
) == mode
)
4744 /* If arg is a constant integer being extended from a narrower mode,
4745 we must really truncate to get the extended bits right. Otherwise
4746 (unsigned long) (unsigned char) ("\377"[0])
4747 would come out as ffffffff. */
4748 if (GET_MODE (op0
) == VOIDmode
4749 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4750 < GET_MODE_BITSIZE (mode
)))
4752 /* MODE must be narrower than HOST_BITS_PER_INT. */
4753 int width
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4755 if (width
< HOST_BITS_PER_WIDE_INT
)
4757 HOST_WIDE_INT val
= (GET_CODE (op0
) == CONST_INT
? INTVAL (op0
)
4758 : CONST_DOUBLE_LOW (op0
));
4759 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4760 || !(val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
4761 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
4763 val
|= ~(((HOST_WIDE_INT
) 1 << width
) - 1);
4765 op0
= GEN_INT (val
);
4769 op0
= (simplify_unary_operation
4770 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4771 ? ZERO_EXTEND
: SIGN_EXTEND
),
4773 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))));
4778 if (GET_MODE (op0
) == VOIDmode
)
4780 if (modifier
== EXPAND_INITIALIZER
)
4781 return gen_rtx (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
4782 if (flag_force_mem
&& GET_CODE (op0
) == MEM
)
4783 op0
= copy_to_reg (op0
);
4786 return convert_to_mode (mode
, op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4788 convert_move (target
, op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4792 /* We come here from MINUS_EXPR when the second operand is a constant. */
4794 this_optab
= add_optab
;
4796 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4797 something else, make sure we add the register to the constant and
4798 then to the other thing. This case can occur during strength
4799 reduction and doing it this way will produce better code if the
4800 frame pointer or argument pointer is eliminated.
4802 fold-const.c will ensure that the constant is always in the inner
4803 PLUS_EXPR, so the only case we need to do anything about is if
4804 sp, ap, or fp is our second argument, in which case we must swap
4805 the innermost first argument and our second argument. */
4807 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
4808 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
4809 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
4810 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
4811 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
4812 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
4814 tree t
= TREE_OPERAND (exp
, 1);
4816 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4817 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
4820 /* If the result is to be Pmode and we are adding an integer to
4821 something, we might be forming a constant. So try to use
4822 plus_constant. If it produces a sum and we can't accept it,
4823 use force_operand. This allows P = &ARR[const] to generate
4824 efficient code on machines where a SYMBOL_REF is not a valid
4827 If this is an EXPAND_SUM call, always return the sum. */
4828 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
4829 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
4830 && (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
4833 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
4835 op1
= plus_constant (op1
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)));
4836 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
4837 op1
= force_operand (op1
, target
);
4841 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
4842 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
4843 && (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
4846 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
4848 op0
= plus_constant (op0
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)));
4849 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
4850 op0
= force_operand (op0
, target
);
4854 /* No sense saving up arithmetic to be done
4855 if it's all in the wrong mode to form part of an address.
4856 And force_operand won't know whether to sign-extend or
4858 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
4859 || mode
!= Pmode
) goto binop
;
4861 preexpand_calls (exp
);
4862 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
4865 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
4866 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
4868 /* Make sure any term that's a sum with a constant comes last. */
4869 if (GET_CODE (op0
) == PLUS
4870 && CONSTANT_P (XEXP (op0
, 1)))
4876 /* If adding to a sum including a constant,
4877 associate it to put the constant outside. */
4878 if (GET_CODE (op1
) == PLUS
4879 && CONSTANT_P (XEXP (op1
, 1)))
4881 rtx constant_term
= const0_rtx
;
4883 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
4886 /* Ensure that MULT comes first if there is one. */
4887 else if (GET_CODE (op0
) == MULT
)
4888 op0
= gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
4890 op0
= gen_rtx (PLUS
, mode
, XEXP (op1
, 0), op0
);
4892 /* Let's also eliminate constants from op0 if possible. */
4893 op0
= eliminate_constant_term (op0
, &constant_term
);
4895 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4896 their sum should be a constant. Form it into OP1, since the
4897 result we want will then be OP0 + OP1. */
4899 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
4904 op1
= gen_rtx (PLUS
, mode
, constant_term
, XEXP (op1
, 1));
4907 /* Put a constant term last and put a multiplication first. */
4908 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
4909 temp
= op1
, op1
= op0
, op0
= temp
;
4911 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
4912 return temp
? temp
: gen_rtx (PLUS
, mode
, op0
, op1
);
4915 /* Handle difference of two symbolic constants,
4916 for the sake of an initializer. */
4917 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
4918 && really_constant_p (TREE_OPERAND (exp
, 0))
4919 && really_constant_p (TREE_OPERAND (exp
, 1)))
4921 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
4922 VOIDmode
, modifier
);
4923 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
4924 VOIDmode
, modifier
);
4925 return gen_rtx (MINUS
, mode
, op0
, op1
);
4927 /* Convert A - const to A + (-const). */
4928 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
4930 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0),
4931 fold (build1 (NEGATE_EXPR
, type
,
4932 TREE_OPERAND (exp
, 1))));
4935 this_optab
= sub_optab
;
4939 preexpand_calls (exp
);
4940 /* If first operand is constant, swap them.
4941 Thus the following special case checks need only
4942 check the second operand. */
4943 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
4945 register tree t1
= TREE_OPERAND (exp
, 0);
4946 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
4947 TREE_OPERAND (exp
, 1) = t1
;
4950 /* Attempt to return something suitable for generating an
4951 indexed address, for machines that support that. */
4953 if (modifier
== EXPAND_SUM
&& mode
== Pmode
4954 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
4955 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
4957 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, EXPAND_SUM
);
4959 /* Apply distributive law if OP0 is x+c. */
4960 if (GET_CODE (op0
) == PLUS
4961 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
4962 return gen_rtx (PLUS
, mode
,
4963 gen_rtx (MULT
, mode
, XEXP (op0
, 0),
4964 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
4965 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
4966 * INTVAL (XEXP (op0
, 1))));
4968 if (GET_CODE (op0
) != REG
)
4969 op0
= force_operand (op0
, NULL_RTX
);
4970 if (GET_CODE (op0
) != REG
)
4971 op0
= copy_to_mode_reg (mode
, op0
);
4973 return gen_rtx (MULT
, mode
, op0
,
4974 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
4977 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
4980 /* Check for multiplying things that have been extended
4981 from a narrower type. If this machine supports multiplying
4982 in that narrower type with a result in the desired type,
4983 do it that way, and avoid the explicit type-conversion. */
4984 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
4985 && TREE_CODE (type
) == INTEGER_TYPE
4986 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
4987 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4988 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
4989 && int_fits_type_p (TREE_OPERAND (exp
, 1),
4990 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
4991 /* Don't use a widening multiply if a shift will do. */
4992 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
4993 > HOST_BITS_PER_WIDE_INT
)
4994 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
4996 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
4997 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
4999 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
5000 /* If both operands are extended, they must either both
5001 be zero-extended or both be sign-extended. */
5002 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
5004 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
5006 enum machine_mode innermode
5007 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
5008 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5009 ? umul_widen_optab
: smul_widen_optab
);
5010 if (mode
== GET_MODE_WIDER_MODE (innermode
)
5011 && this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
5013 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
5014 NULL_RTX
, VOIDmode
, 0);
5015 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
5016 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5019 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
5020 NULL_RTX
, VOIDmode
, 0);
5024 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5025 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5026 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
5028 case TRUNC_DIV_EXPR
:
5029 case FLOOR_DIV_EXPR
:
5031 case ROUND_DIV_EXPR
:
5032 case EXACT_DIV_EXPR
:
5033 preexpand_calls (exp
);
5034 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5036 /* Possible optimization: compute the dividend with EXPAND_SUM
5037 then if the divisor is constant can optimize the case
5038 where some terms of the dividend have coeffs divisible by it. */
5039 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5040 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5041 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
5044 this_optab
= flodiv_optab
;
5047 case TRUNC_MOD_EXPR
:
5048 case FLOOR_MOD_EXPR
:
5050 case ROUND_MOD_EXPR
:
5051 preexpand_calls (exp
);
5052 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5054 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5055 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5056 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
5058 case FIX_ROUND_EXPR
:
5059 case FIX_FLOOR_EXPR
:
5061 abort (); /* Not used for C. */
5063 case FIX_TRUNC_EXPR
:
5064 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
5066 target
= gen_reg_rtx (mode
);
5067 expand_fix (target
, op0
, unsignedp
);
5071 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
5073 target
= gen_reg_rtx (mode
);
5074 /* expand_float can't figure out what to do if FROM has VOIDmode.
5075 So give it the correct mode. With -O, cse will optimize this. */
5076 if (GET_MODE (op0
) == VOIDmode
)
5077 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
5079 expand_float (target
, op0
,
5080 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5084 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
5085 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
5091 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5093 /* Handle complex values specially. */
5095 enum machine_mode opmode
5096 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5098 if (GET_MODE_CLASS (opmode
) == MODE_COMPLEX_INT
5099 || GET_MODE_CLASS (opmode
) == MODE_COMPLEX_FLOAT
)
5100 return expand_complex_abs (opmode
, op0
, target
, unsignedp
);
5103 /* Unsigned abs is simply the operand. Testing here means we don't
5104 risk generating incorrect code below. */
5105 if (TREE_UNSIGNED (type
))
5108 /* First try to do it with a special abs instruction. */
5109 temp
= expand_unop (mode
, abs_optab
, op0
, target
, 0);
5113 /* If this machine has expensive jumps, we can do integer absolute
5114 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5115 where W is the width of MODE. */
5117 if (GET_MODE_CLASS (mode
) == MODE_INT
&& BRANCH_COST
>= 2)
5119 rtx extended
= expand_shift (RSHIFT_EXPR
, mode
, op0
,
5120 size_int (GET_MODE_BITSIZE (mode
) - 1),
5123 temp
= expand_binop (mode
, xor_optab
, extended
, op0
, target
, 0,
5126 temp
= expand_binop (mode
, sub_optab
, temp
, extended
, target
, 0,
5133 /* If that does not win, use conditional jump and negate. */
5134 target
= original_target
;
5135 temp
= gen_label_rtx ();
5136 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 0))
5137 || (GET_CODE (target
) == REG
5138 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5139 target
= gen_reg_rtx (mode
);
5140 emit_move_insn (target
, op0
);
5141 emit_cmp_insn (target
,
5142 expand_expr (convert (type
, integer_zero_node
),
5143 NULL_RTX
, VOIDmode
, 0),
5144 GE
, NULL_RTX
, mode
, 0, 0);
5146 emit_jump_insn (gen_bge (temp
));
5147 op0
= expand_unop (mode
, neg_optab
, target
, target
, 0);
5149 emit_move_insn (target
, op0
);
5156 target
= original_target
;
5157 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1))
5158 || (GET_CODE (target
) == REG
5159 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5160 target
= gen_reg_rtx (mode
);
5161 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5162 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
5164 /* First try to do it with a special MIN or MAX instruction.
5165 If that does not win, use a conditional jump to select the proper
5167 this_optab
= (TREE_UNSIGNED (type
)
5168 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
5169 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
5171 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
5177 emit_move_insn (target
, op0
);
5178 op0
= gen_label_rtx ();
5179 if (code
== MAX_EXPR
)
5180 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
5181 ? compare_from_rtx (target
, op1
, GEU
, 1, mode
, NULL_RTX
, 0)
5182 : compare_from_rtx (target
, op1
, GE
, 0, mode
, NULL_RTX
, 0));
5184 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
5185 ? compare_from_rtx (target
, op1
, LEU
, 1, mode
, NULL_RTX
, 0)
5186 : compare_from_rtx (target
, op1
, LE
, 0, mode
, NULL_RTX
, 0));
5187 if (temp
== const0_rtx
)
5188 emit_move_insn (target
, op1
);
5189 else if (temp
!= const_true_rtx
)
5191 if (bcc_gen_fctn
[(int) GET_CODE (temp
)] != 0)
5192 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (temp
)]) (op0
));
5195 emit_move_insn (target
, op1
);
5200 /* ??? Can optimize when the operand of this is a bitwise operation,
5201 by using a different bitwise operation. */
5203 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5204 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
5210 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5211 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
5216 /* ??? Can optimize bitwise operations with one arg constant.
5217 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5218 and (a bitwise1 b) bitwise2 b (etc)
5219 but that is probably not worth while. */
5221 /* BIT_AND_EXPR is for bitwise anding.
5222 TRUTH_AND_EXPR is for anding two boolean values
5223 when we want in all cases to compute both of them.
5224 In general it is fastest to do TRUTH_AND_EXPR by
5225 computing both operands as actual zero-or-1 values
5226 and then bitwise anding. In cases where there cannot
5227 be any side effects, better code would be made by
5228 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5229 but the question is how to recognize those cases. */
5231 case TRUTH_AND_EXPR
:
5233 this_optab
= and_optab
;
5236 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5239 this_optab
= ior_optab
;
5242 case TRUTH_XOR_EXPR
:
5244 this_optab
= xor_optab
;
5251 preexpand_calls (exp
);
5252 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5254 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5255 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
5258 /* Could determine the answer when only additive constants differ.
5259 Also, the addition of one can be handled by changing the condition. */
5266 preexpand_calls (exp
);
5267 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
5270 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5271 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
5273 && GET_CODE (original_target
) == REG
5274 && (GET_MODE (original_target
)
5275 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
5277 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
, VOIDmode
, 0);
5278 if (temp
!= original_target
)
5279 temp
= copy_to_reg (temp
);
5280 op1
= gen_label_rtx ();
5281 emit_cmp_insn (temp
, const0_rtx
, EQ
, NULL_RTX
,
5282 GET_MODE (temp
), unsignedp
, 0);
5283 emit_jump_insn (gen_beq (op1
));
5284 emit_move_insn (temp
, const1_rtx
);
5288 /* If no set-flag instruction, must generate a conditional
5289 store into a temporary variable. Drop through
5290 and handle this like && and ||. */
5292 case TRUTH_ANDIF_EXPR
:
5293 case TRUTH_ORIF_EXPR
:
5294 if (target
== 0 || ! safe_from_p (target
, exp
)
5295 /* Make sure we don't have a hard reg (such as function's return
5296 value) live across basic blocks, if not optimizing. */
5297 || (!optimize
&& GET_CODE (target
) == REG
5298 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5299 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5300 emit_clr_insn (target
);
5301 op1
= gen_label_rtx ();
5302 jumpifnot (exp
, op1
);
5303 emit_0_to_1_insn (target
);
5307 case TRUTH_NOT_EXPR
:
5308 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
5309 /* The parser is careful to generate TRUTH_NOT_EXPR
5310 only with operands that are always zero or one. */
5311 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
5312 target
, 1, OPTAB_LIB_WIDEN
);
5318 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
5320 return expand_expr (TREE_OPERAND (exp
, 1),
5321 (ignore
? const0_rtx
: target
),
5326 /* Note that COND_EXPRs whose type is a structure or union
5327 are required to be constructed to contain assignments of
5328 a temporary variable, so that we can evaluate them here
5329 for side effect only. If type is void, we must do likewise. */
5331 /* If an arm of the branch requires a cleanup,
5332 only that cleanup is performed. */
5335 tree binary_op
= 0, unary_op
= 0;
5336 tree old_cleanups
= cleanups_this_call
;
5337 cleanups_this_call
= 0;
5339 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5340 convert it to our mode, if necessary. */
5341 if (integer_onep (TREE_OPERAND (exp
, 1))
5342 && integer_zerop (TREE_OPERAND (exp
, 2))
5343 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
5345 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
5346 if (GET_MODE (op0
) == mode
)
5349 target
= gen_reg_rtx (mode
);
5350 convert_move (target
, op0
, unsignedp
);
5354 /* If we are not to produce a result, we have no target. Otherwise,
5355 if a target was specified use it; it will not be used as an
5356 intermediate target unless it is safe. If no target, use a
5359 if (mode
== VOIDmode
|| ignore
)
5361 else if (original_target
5362 && safe_from_p (original_target
, TREE_OPERAND (exp
, 0)))
5363 temp
= original_target
;
5364 else if (mode
== BLKmode
)
5366 if (TYPE_SIZE (type
) == 0
5367 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5369 temp
= assign_stack_temp (BLKmode
,
5370 (TREE_INT_CST_LOW (TYPE_SIZE (type
))
5371 + BITS_PER_UNIT
- 1)
5372 / BITS_PER_UNIT
, 0);
5375 temp
= gen_reg_rtx (mode
);
5377 /* Check for X ? A + B : A. If we have this, we can copy
5378 A to the output and conditionally add B. Similarly for unary
5379 operations. Don't do this if X has side-effects because
5380 those side effects might affect A or B and the "?" operation is
5381 a sequence point in ANSI. (We test for side effects later.) */
5383 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
5384 && operand_equal_p (TREE_OPERAND (exp
, 2),
5385 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
5386 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
5387 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
5388 && operand_equal_p (TREE_OPERAND (exp
, 1),
5389 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
5390 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
5391 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
5392 && operand_equal_p (TREE_OPERAND (exp
, 2),
5393 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
5394 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
5395 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
5396 && operand_equal_p (TREE_OPERAND (exp
, 1),
5397 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
5398 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
5400 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5401 operation, do this as A + (X != 0). Similarly for other simple
5402 binary operators. */
5403 if (singleton
&& binary_op
5404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
5405 && (TREE_CODE (binary_op
) == PLUS_EXPR
5406 || TREE_CODE (binary_op
) == MINUS_EXPR
5407 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
5408 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
5409 || TREE_CODE (binary_op
) == BIT_AND_EXPR
)
5410 && integer_onep (TREE_OPERAND (binary_op
, 1))
5411 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
5414 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
5415 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
5416 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
5417 : TREE_CODE (binary_op
) == BIT_XOR_EXPR
? xor_optab
5420 /* If we had X ? A : A + 1, do this as A + (X == 0).
5422 We have to invert the truth value here and then put it
5423 back later if do_store_flag fails. We cannot simply copy
5424 TREE_OPERAND (exp, 0) to another variable and modify that
5425 because invert_truthvalue can modify the tree pointed to
5427 if (singleton
== TREE_OPERAND (exp
, 1))
5428 TREE_OPERAND (exp
, 0)
5429 = invert_truthvalue (TREE_OPERAND (exp
, 0));
5431 result
= do_store_flag (TREE_OPERAND (exp
, 0),
5432 (safe_from_p (temp
, singleton
)
5434 mode
, BRANCH_COST
<= 1);
5438 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
5439 return expand_binop (mode
, boptab
, op1
, result
, temp
,
5440 unsignedp
, OPTAB_LIB_WIDEN
);
5442 else if (singleton
== TREE_OPERAND (exp
, 1))
5443 TREE_OPERAND (exp
, 0)
5444 = invert_truthvalue (TREE_OPERAND (exp
, 0));
5448 op0
= gen_label_rtx ();
5450 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
5454 /* If the target conflicts with the other operand of the
5455 binary op, we can't use it. Also, we can't use the target
5456 if it is a hard register, because evaluating the condition
5457 might clobber it. */
5459 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1)))
5460 || (GET_CODE (temp
) == REG
5461 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
5462 temp
= gen_reg_rtx (mode
);
5463 store_expr (singleton
, temp
, 0);
5466 expand_expr (singleton
,
5467 ignore
? const1_rtx
: NULL_RTX
, VOIDmode
, 0);
5468 if (cleanups_this_call
)
5470 sorry ("aggregate value in COND_EXPR");
5471 cleanups_this_call
= 0;
5473 if (singleton
== TREE_OPERAND (exp
, 1))
5474 jumpif (TREE_OPERAND (exp
, 0), op0
);
5476 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
5478 if (binary_op
&& temp
== 0)
5479 /* Just touch the other operand. */
5480 expand_expr (TREE_OPERAND (binary_op
, 1),
5481 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
5483 store_expr (build (TREE_CODE (binary_op
), type
,
5484 make_tree (type
, temp
),
5485 TREE_OPERAND (binary_op
, 1)),
5488 store_expr (build1 (TREE_CODE (unary_op
), type
,
5489 make_tree (type
, temp
)),
5494 /* This is now done in jump.c and is better done there because it
5495 produces shorter register lifetimes. */
5497 /* Check for both possibilities either constants or variables
5498 in registers (but not the same as the target!). If so, can
5499 save branches by assigning one, branching, and assigning the
5501 else if (temp
&& GET_MODE (temp
) != BLKmode
5502 && (TREE_CONSTANT (TREE_OPERAND (exp
, 1))
5503 || ((TREE_CODE (TREE_OPERAND (exp
, 1)) == PARM_DECL
5504 || TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
)
5505 && DECL_RTL (TREE_OPERAND (exp
, 1))
5506 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 1))) == REG
5507 && DECL_RTL (TREE_OPERAND (exp
, 1)) != temp
))
5508 && (TREE_CONSTANT (TREE_OPERAND (exp
, 2))
5509 || ((TREE_CODE (TREE_OPERAND (exp
, 2)) == PARM_DECL
5510 || TREE_CODE (TREE_OPERAND (exp
, 2)) == VAR_DECL
)
5511 && DECL_RTL (TREE_OPERAND (exp
, 2))
5512 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 2))) == REG
5513 && DECL_RTL (TREE_OPERAND (exp
, 2)) != temp
)))
5515 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
5516 temp
= gen_reg_rtx (mode
);
5517 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
5518 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
5519 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
5523 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5524 comparison operator. If we have one of these cases, set the
5525 output to A, branch on A (cse will merge these two references),
5526 then set the output to FOO. */
5528 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
5529 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
5530 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
5531 TREE_OPERAND (exp
, 1), 0)
5532 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
5533 && safe_from_p (temp
, TREE_OPERAND (exp
, 2)))
5535 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
5536 temp
= gen_reg_rtx (mode
);
5537 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
5538 jumpif (TREE_OPERAND (exp
, 0), op0
);
5539 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
5543 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
5544 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
5545 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
5546 TREE_OPERAND (exp
, 2), 0)
5547 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
5548 && safe_from_p (temp
, TREE_OPERAND (exp
, 1)))
5550 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
5551 temp
= gen_reg_rtx (mode
);
5552 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
5553 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
5554 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
5559 op1
= gen_label_rtx ();
5560 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
5562 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
5564 expand_expr (TREE_OPERAND (exp
, 1),
5565 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
5566 if (cleanups_this_call
)
5568 sorry ("aggregate value in COND_EXPR");
5569 cleanups_this_call
= 0;
5573 emit_jump_insn (gen_jump (op1
));
5577 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
5579 expand_expr (TREE_OPERAND (exp
, 2),
5580 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
5583 if (cleanups_this_call
)
5585 sorry ("aggregate value in COND_EXPR");
5586 cleanups_this_call
= 0;
5592 cleanups_this_call
= old_cleanups
;
5598 /* Something needs to be initialized, but we didn't know
5599 where that thing was when building the tree. For example,
5600 it could be the return value of a function, or a parameter
5601 to a function which lays down in the stack, or a temporary
5602 variable which must be passed by reference.
5604 We guarantee that the expression will either be constructed
5605 or copied into our original target. */
5607 tree slot
= TREE_OPERAND (exp
, 0);
5610 if (TREE_CODE (slot
) != VAR_DECL
)
5615 if (DECL_RTL (slot
) != 0)
5617 target
= DECL_RTL (slot
);
5618 /* If we have already expanded the slot, so don't do
5620 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
5625 target
= assign_stack_temp (mode
, int_size_in_bytes (type
), 0);
5626 /* All temp slots at this level must not conflict. */
5627 preserve_temp_slots (target
);
5628 DECL_RTL (slot
) = target
;
5632 /* I bet this needs to be done, and I bet that it needs to
5633 be above, inside the else clause. The reason is
5634 simple, how else is it going to get cleaned up? (mrs)
5636 The reason it probably did not work before, and was
5637 commented out, is because this was re-expanding already
5638 expanded target_exprs (target == 0 and DECL_RTL (slot)
5639 != 0), also cleaning them up many times as well. :-( */
5641 /* Since SLOT is not known to the called function
5642 to belong to its stack frame, we must build an explicit
5643 cleanup. This case occurs when we must build up a reference
5644 to pass the reference as an argument. In this case,
5645 it is very likely that such a reference need not be
5648 if (TREE_OPERAND (exp
, 2) == 0)
5649 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
5650 if (TREE_OPERAND (exp
, 2))
5651 cleanups_this_call
= tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2),
5652 cleanups_this_call
);
5657 /* This case does occur, when expanding a parameter which
5658 needs to be constructed on the stack. The target
5659 is the actual stack address that we want to initialize.
5660 The function we call will perform the cleanup in this case. */
5662 DECL_RTL (slot
) = target
;
5665 exp1
= TREE_OPERAND (exp
, 1);
5666 /* Mark it as expanded. */
5667 TREE_OPERAND (exp
, 1) = NULL_TREE
;
5669 return expand_expr (exp1
, target
, tmode
, modifier
);
5674 tree lhs
= TREE_OPERAND (exp
, 0);
5675 tree rhs
= TREE_OPERAND (exp
, 1);
5676 tree noncopied_parts
= 0;
5677 tree lhs_type
= TREE_TYPE (lhs
);
5679 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
5680 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
5681 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
5682 TYPE_NONCOPIED_PARTS (lhs_type
));
5683 while (noncopied_parts
!= 0)
5685 expand_assignment (TREE_VALUE (noncopied_parts
),
5686 TREE_PURPOSE (noncopied_parts
), 0, 0);
5687 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
5694 /* If lhs is complex, expand calls in rhs before computing it.
5695 That's so we don't compute a pointer and save it over a call.
5696 If lhs is simple, compute it first so we can give it as a
5697 target if the rhs is just a call. This avoids an extra temp and copy
5698 and that prevents a partial-subsumption which makes bad code.
5699 Actually we could treat component_ref's of vars like vars. */
5701 tree lhs
= TREE_OPERAND (exp
, 0);
5702 tree rhs
= TREE_OPERAND (exp
, 1);
5703 tree noncopied_parts
= 0;
5704 tree lhs_type
= TREE_TYPE (lhs
);
5708 if (TREE_CODE (lhs
) != VAR_DECL
5709 && TREE_CODE (lhs
) != RESULT_DECL
5710 && TREE_CODE (lhs
) != PARM_DECL
)
5711 preexpand_calls (exp
);
5713 /* Check for |= or &= of a bitfield of size one into another bitfield
5714 of size 1. In this case, (unless we need the result of the
5715 assignment) we can do this more efficiently with a
5716 test followed by an assignment, if necessary.
5718 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5719 things change so we do, this code should be enhanced to
5722 && TREE_CODE (lhs
) == COMPONENT_REF
5723 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
5724 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
5725 && TREE_OPERAND (rhs
, 0) == lhs
5726 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
5727 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
5728 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
5730 rtx label
= gen_label_rtx ();
5732 do_jump (TREE_OPERAND (rhs
, 1),
5733 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
5734 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
5735 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
5736 (TREE_CODE (rhs
) == BIT_IOR_EXPR
5738 : integer_zero_node
)),
5740 do_pending_stack_adjust ();
5745 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
5746 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
5747 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
5748 TYPE_NONCOPIED_PARTS (lhs_type
));
5750 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
5751 while (noncopied_parts
!= 0)
5753 expand_assignment (TREE_PURPOSE (noncopied_parts
),
5754 TREE_VALUE (noncopied_parts
), 0, 0);
5755 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
5760 case PREINCREMENT_EXPR
:
5761 case PREDECREMENT_EXPR
:
5762 return expand_increment (exp
, 0);
5764 case POSTINCREMENT_EXPR
:
5765 case POSTDECREMENT_EXPR
:
5766 /* Faster to treat as pre-increment if result is not used. */
5767 return expand_increment (exp
, ! ignore
);
5770 /* Are we taking the address of a nested function? */
5771 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
5772 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0)
5774 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
5775 op0
= force_operand (op0
, target
);
5779 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
,
5780 (modifier
== EXPAND_INITIALIZER
5781 ? modifier
: EXPAND_CONST_ADDRESS
));
5782 if (GET_CODE (op0
) != MEM
)
5785 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
5786 return XEXP (op0
, 0);
5787 op0
= force_operand (XEXP (op0
, 0), target
);
5789 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
5790 return force_reg (Pmode
, op0
);
5793 case ENTRY_VALUE_EXPR
:
5796 /* COMPLEX type for Extended Pascal & Fortran */
5799 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
5803 /* Get the rtx code of the operands. */
5804 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
5805 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
5808 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5810 prev
= get_last_insn ();
5812 /* Tell flow that the whole of the destination is being set. */
5813 if (GET_CODE (target
) == REG
)
5814 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
5816 /* Move the real (op0) and imaginary (op1) parts to their location. */
5817 emit_move_insn (gen_realpart (mode
, target
), op0
);
5818 emit_move_insn (gen_imagpart (mode
, target
), op1
);
5820 /* Complex construction should appear as a single unit. */
5827 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
5828 return gen_realpart (mode
, op0
);
5831 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
5832 return gen_imagpart (mode
, op0
);
5836 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
5840 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
5843 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5845 prev
= get_last_insn ();
5847 /* Tell flow that the whole of the destination is being set. */
5848 if (GET_CODE (target
) == REG
)
5849 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
5851 /* Store the realpart and the negated imagpart to target. */
5852 emit_move_insn (gen_realpart (mode
, target
), gen_realpart (mode
, op0
));
5854 imag_t
= gen_imagpart (mode
, target
);
5855 temp
= expand_unop (mode
, neg_optab
,
5856 gen_imagpart (mode
, op0
), imag_t
, 0);
5858 emit_move_insn (imag_t
, temp
);
5860 /* Conjugate should appear as a single unit */
5867 op0
= CONST0_RTX (tmode
);
5873 return (*lang_expand_expr
) (exp
, target
, tmode
, modifier
);
5876 /* Here to do an ordinary binary operator, generating an instruction
5877 from the optab already placed in `this_optab'. */
5879 preexpand_calls (exp
);
5880 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5882 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5883 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5885 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
5886 unsignedp
, OPTAB_LIB_WIDEN
);
5892 /* Return the alignment in bits of EXP, a pointer valued expression.
5893 But don't return more than MAX_ALIGN no matter what.
5894 The alignment returned is, by default, the alignment of the thing that
5895 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5897 Otherwise, look at the expression to see if we can do better, i.e., if the
5898 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this chunk is a lossy extraction -- several original lines
   (parameter declarations, braces, some case labels and returns) are
   missing, so the comments below describe only the visible logic; confirm
   against the complete file.  */
5901 get_pointer_alignment (exp
, max_align
)
5905 unsigned align
, inner
;
/* A non-pointer EXP has no alignment to report (0, per the comment above
   this function).  */
5907 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
/* Default answer: the declared alignment of the pointed-to type,
   capped at MAX_ALIGN.  */
5910 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
5911 align
= MIN (align
, max_align
);
/* Walk the expression looking for a tighter (larger) bound.  */
5915 switch (TREE_CODE (exp
))
5919 case NON_LVALUE_EXPR
:
/* Look through the wrapper; if the inner pointer's target type is more
   strictly aligned, adopt that alignment (still capped at MAX_ALIGN).  */
5920 exp
= TREE_OPERAND (exp
, 0);
5921 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
5923 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
5924 inner
= MIN (inner
, max_align
);
5925 align
= MAX (align
, inner
);
5929 /* If sum of pointer + int, restrict our maximum alignment to that
5930 imposed by the integer. If not, we can't do any better than
5932 if (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
)
/* Presumably shrinks the cap until it divides the constant byte offset
   (scaled to bits by BITS_PER_UNIT); the loop body/condition tail is not
   visible here -- TODO confirm.  */
5935 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
)
5940 exp
= TREE_OPERAND (exp
, 0);
5944 /* See what we are pointing at and look at its alignment. */
5945 exp
= TREE_OPERAND (exp
, 0);
/* Functions align to FUNCTION_BOUNDARY; declarations (class 'd') get
   their DECL_ALIGN; constants (class 'c') may get a target-specific
   boost via CONSTANT_ALIGNMENT when the target defines it.  */
5946 if (TREE_CODE (exp
) == FUNCTION_DECL
)
5947 align
= MAX (align
, FUNCTION_BOUNDARY
);
5948 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
5949 align
= MAX (align
, DECL_ALIGN (exp
));
5950 #ifdef CONSTANT_ALIGNMENT
5951 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c')
5952 align
= CONSTANT_ALIGNMENT (exp
, align
);
/* Never report more than the caller's cap.  */
5954 return MIN (align
, max_align
);
5962 /* Return the tree node and offset if a given argument corresponds to
5963 a string constant. */
/* ARG is an address-valued tree; on success the STRING_CST node is
   returned and *PTR_OFFSET receives the byte offset into it.
   NOTE(review): the extraction has dropped lines in the PLUS_EXPR arms
   (the stores through PTR_OFFSET are not visible) -- confirm against the
   complete file.  */
5966 string_constant (arg
, ptr_offset
)
/* Direct &"..." reference: the offset is zero.  */
5972 if (TREE_CODE (arg
) == ADDR_EXPR
5973 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
5975 *ptr_offset
= integer_zero_node
;
5976 return TREE_OPERAND (arg
, 0);
/* (&"..." + offset), with the string address in either operand.  */
5978 else if (TREE_CODE (arg
) == PLUS_EXPR
)
5980 tree arg0
= TREE_OPERAND (arg
, 0);
5981 tree arg1
= TREE_OPERAND (arg
, 1);
5986 if (TREE_CODE (arg0
) == ADDR_EXPR
5987 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
5990 return TREE_OPERAND (arg0
, 0);
5992 else if (TREE_CODE (arg1
) == ADDR_EXPR
5993 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
5996 return TREE_OPERAND (arg1
, 0);
6003 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6004 way, because it could contain a zero byte in the middle.
6005 TREE_STRING_LENGTH is the size of the character array, not the string.
6007 Unfortunately, string_constant can't access the values of const char
6008 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function's declaration and several body lines are
   missing from this extraction (this appears to be c_strlen (src), which
   presumably returns a size tree or 0 when the length is unknown --
   TODO confirm against the complete file).  */
/* Resolve SRC to a STRING_CST plus an offset, if possible.  */
6018 src
= string_constant (src
, &offset_node
);
/* MAX is the size of the character array; PTR its bytes.  */
6021 max
= TREE_STRING_LENGTH (src
);
6022 ptr
= TREE_STRING_POINTER (src
);
/* Non-constant offset: we can only succeed if the string has no
   embedded NULs (scanned below).  */
6023 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
6025 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6026 compute the offset to the following null if we don't know where to
6027 start searching for it. */
6029 for (i
= 0; i
< max
; i
++)
6032 /* We don't know the starting offset, but we do know that the string
6033 has no internal zero bytes. We can assume that the offset falls
6034 within the bounds of the string; otherwise, the programmer deserves
6035 what he gets. Subtract the offset from the length of the string,
6037 /* This would perhaps not be valid if we were dealing with named
6038 arrays in addition to literal string constants. */
6039 return size_binop (MINUS_EXPR
, size_int (max
), offset_node
);
6042 /* We have a known offset into the string. Start searching there for
6043 a null character. */
6044 if (offset_node
== 0)
6048 /* Did we get a long long offset? If so, punt. */
6049 if (TREE_INT_CST_HIGH (offset_node
) != 0)
/* Offset fits in a host word; use it directly.  */
6051 offset
= TREE_INT_CST_LOW (offset_node
);
6053 /* If the offset is known to be out of bounds, warn, and call strlen at
6055 if (offset
< 0 || offset
> max
)
6057 warning ("offset outside bounds of constant string");
6060 /* Use strlen to search for the first zero byte. Since any strings
6061 constructed with build_string will have nulls appended, we win even
6062 if we get handed something like (char[4])"abcd".
6064 Since OFFSET is our starting index into the string, no further
6065 calculation is needed. */
6066 return size_int (strlen (ptr
+ offset
));
6069 /* Expand an expression EXP that calls a built-in function,
6070 with result going to TARGET if that's convenient
6071 (and in mode MODE if that's convenient).
6072 SUBTARGET may be used as the target for computing one of EXP's operands.
6073 IGNORE is nonzero if the value is to be ignored. */
6076 expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
6080 enum machine_mode mode
;
6083 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6084 tree arglist
= TREE_OPERAND (exp
, 1);
6087 enum machine_mode value_mode
= TYPE_MODE (TREE_TYPE (exp
));
6088 optab builtin_optab
;
6090 switch (DECL_FUNCTION_CODE (fndecl
))
6095 /* build_function_call changes these into ABS_EXPR. */
6100 case BUILT_IN_FSQRT
:
6101 /* If not optimizing, call the library function. */
6106 /* Arg could be wrong type if user redeclared this fcn wrong. */
6107 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
)
6108 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp
)));
6110 /* Stabilize and compute the argument. */
6111 if (TREE_CODE (TREE_VALUE (arglist
)) != VAR_DECL
6112 && TREE_CODE (TREE_VALUE (arglist
)) != PARM_DECL
)
6114 exp
= copy_node (exp
);
6115 arglist
= copy_node (arglist
);
6116 TREE_OPERAND (exp
, 1) = arglist
;
6117 TREE_VALUE (arglist
) = save_expr (TREE_VALUE (arglist
));
6119 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
6121 /* Make a suitable register to place result in. */
6122 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
6127 switch (DECL_FUNCTION_CODE (fndecl
))
6130 builtin_optab
= sin_optab
; break;
6132 builtin_optab
= cos_optab
; break;
6133 case BUILT_IN_FSQRT
:
6134 builtin_optab
= sqrt_optab
; break;
6139 /* Compute into TARGET.
6140 Set TARGET to wherever the result comes back. */
6141 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
6142 builtin_optab
, op0
, target
, 0);
6144 /* If we were unable to expand via the builtin, stop the
6145 sequence (without outputting the insns) and break, causing
6146 a call to the library function. */
6153 /* Check the results by default. But if flag_fast_math is turned on,
6154 then assume sqrt will always be called with valid arguments. */
6156 if (! flag_fast_math
)
6158 /* Don't define the builtin FP instructions
6159 if your machine is not IEEE. */
6160 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
6163 lab1
= gen_label_rtx ();
6165 /* Test the result; if it is NaN, set errno=EDOM because
6166 the argument was not in the domain. */
6167 emit_cmp_insn (target
, target
, EQ
, 0, GET_MODE (target
), 0, 0);
6168 emit_jump_insn (gen_beq (lab1
));
6172 #ifdef GEN_ERRNO_RTX
6173 rtx errno_rtx
= GEN_ERRNO_RTX
;
6176 = gen_rtx (MEM
, word_mode
, gen_rtx (SYMBOL_REF
, Pmode
, "*errno"));
6179 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
6182 /* We can't set errno=EDOM directly; let the library call do it.
6183 Pop the arguments right away in case the call gets deleted. */
6185 expand_call (exp
, target
, 0);
6192 /* Output the entire sequence. */
6193 insns
= get_insns ();
6199 /* __builtin_apply_args returns block of memory allocated on
6200 the stack into which is stored the arg pointer, structure
6201 value address, static chain, and all the registers that might
6202 possibly be used in performing a function call. The code is
6203 moved to the start of the function so the incoming values are
6205 case BUILT_IN_APPLY_ARGS
:
6206 /* Don't do __builtin_apply_args more than once in a function.
6207 Save the result of the first call and reuse it. */
6208 if (apply_args_value
!= 0)
6209 return apply_args_value
;
6211 /* When this function is called, it means that registers must be
6212 saved on entry to this function. So we migrate the
6213 call to the first insn of this function. */
6218 temp
= expand_builtin_apply_args ();
6222 apply_args_value
= temp
;
6224 /* Put the sequence after the NOTE that starts the function.
6225 If this is inside a SEQUENCE, make the outer-level insn
6226 chain current, so the code is placed at the start of the
6228 push_topmost_sequence ();
6229 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
6230 pop_topmost_sequence ();
6234 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6235 FUNCTION with a copy of the parameters described by
6236 ARGUMENTS, and ARGSIZE. It returns a block of memory
6237 allocated on the stack into which is stored all the registers
6238 that might possibly be used for returning the result of a
6239 function. ARGUMENTS is the value returned by
6240 __builtin_apply_args. ARGSIZE is the number of bytes of
6241 arguments that must be copied. ??? How should this value be
6242 computed? We'll also need a safe worst case value for varargs
6244 case BUILT_IN_APPLY
:
6246 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6247 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
6248 || TREE_CHAIN (arglist
) == 0
6249 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
6250 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
6251 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
6259 for (t
= arglist
, i
= 0; t
; t
= TREE_CHAIN (t
), i
++)
6260 ops
[i
] = expand_expr (TREE_VALUE (t
), NULL_RTX
, VOIDmode
, 0);
6262 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6265 /* __builtin_return (RESULT) causes the function to return the
6266 value described by RESULT. RESULT is address of the block of
6267 memory returned by __builtin_apply. */
6268 case BUILT_IN_RETURN
:
6270 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6271 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
)
6272 expand_builtin_return (expand_expr (TREE_VALUE (arglist
),
6273 NULL_RTX
, VOIDmode
, 0));
6276 case BUILT_IN_SAVEREGS
:
6277 /* Don't do __builtin_saveregs more than once in a function.
6278 Save the result of the first call and reuse it. */
6279 if (saveregs_value
!= 0)
6280 return saveregs_value
;
6282 /* When this function is called, it means that registers must be
6283 saved on entry to this function. So we migrate the
6284 call to the first insn of this function. */
6287 rtx valreg
, saved_valreg
;
6289 /* Now really call the function. `expand_call' does not call
6290 expand_builtin, so there is no danger of infinite recursion here. */
6293 #ifdef EXPAND_BUILTIN_SAVEREGS
6294 /* Do whatever the machine needs done in this case. */
6295 temp
= EXPAND_BUILTIN_SAVEREGS (arglist
);
6297 /* The register where the function returns its value
6298 is likely to have something else in it, such as an argument.
6299 So preserve that register around the call. */
6300 if (value_mode
!= VOIDmode
)
6302 valreg
= hard_libcall_value (value_mode
);
6303 saved_valreg
= gen_reg_rtx (value_mode
);
6304 emit_move_insn (saved_valreg
, valreg
);
6307 /* Generate the call, putting the value in a pseudo. */
6308 temp
= expand_call (exp
, target
, ignore
);
6310 if (value_mode
!= VOIDmode
)
6311 emit_move_insn (valreg
, saved_valreg
);
6317 saveregs_value
= temp
;
6319 /* Put the sequence after the NOTE that starts the function.
6320 If this is inside a SEQUENCE, make the outer-level insn
6321 chain current, so the code is placed at the start of the
6323 push_topmost_sequence ();
6324 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
6325 pop_topmost_sequence ();
6329 /* __builtin_args_info (N) returns word N of the arg space info
6330 for the current function. The number and meanings of words
6331 is controlled by the definition of CUMULATIVE_ARGS. */
6332 case BUILT_IN_ARGS_INFO
:
6334 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
6336 int *word_ptr
= (int *) ¤t_function_args_info
;
6337 tree type
, elts
, result
;
6339 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
6340 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6341 __FILE__
, __LINE__
);
6345 tree arg
= TREE_VALUE (arglist
);
6346 if (TREE_CODE (arg
) != INTEGER_CST
)
6347 error ("argument of `__builtin_args_info' must be constant");
6350 int wordnum
= TREE_INT_CST_LOW (arg
);
6352 if (wordnum
< 0 || wordnum
>= nwords
|| TREE_INT_CST_HIGH (arg
))
6353 error ("argument of `__builtin_args_info' out of range");
6355 return GEN_INT (word_ptr
[wordnum
]);
6359 error ("missing argument in `__builtin_args_info'");
6364 for (i
= 0; i
< nwords
; i
++)
6365 elts
= tree_cons (NULL_TREE
, build_int_2 (word_ptr
[i
], 0));
6367 type
= build_array_type (integer_type_node
,
6368 build_index_type (build_int_2 (nwords
, 0)));
6369 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, nreverse (elts
));
6370 TREE_CONSTANT (result
) = 1;
6371 TREE_STATIC (result
) = 1;
6372 result
= build (INDIRECT_REF
, build_pointer_type (type
), result
);
6373 TREE_CONSTANT (result
) = 1;
6374 return expand_expr (result
, NULL_RTX
, VOIDmode
, 0);
6378 /* Return the address of the first anonymous stack arg. */
6379 case BUILT_IN_NEXT_ARG
:
6381 tree fntype
= TREE_TYPE (current_function_decl
);
6382 if (!(TYPE_ARG_TYPES (fntype
) != 0
6383 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
6384 != void_type_node
)))
6386 error ("`va_start' used in function with fixed args");
6391 return expand_binop (Pmode
, add_optab
,
6392 current_function_internal_arg_pointer
,
6393 current_function_arg_offset_rtx
,
6394 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
6396 case BUILT_IN_CLASSIFY_TYPE
:
6399 tree type
= TREE_TYPE (TREE_VALUE (arglist
));
6400 enum tree_code code
= TREE_CODE (type
);
6401 if (code
== VOID_TYPE
)
6402 return GEN_INT (void_type_class
);
6403 if (code
== INTEGER_TYPE
)
6404 return GEN_INT (integer_type_class
);
6405 if (code
== CHAR_TYPE
)
6406 return GEN_INT (char_type_class
);
6407 if (code
== ENUMERAL_TYPE
)
6408 return GEN_INT (enumeral_type_class
);
6409 if (code
== BOOLEAN_TYPE
)
6410 return GEN_INT (boolean_type_class
);
6411 if (code
== POINTER_TYPE
)
6412 return GEN_INT (pointer_type_class
);
6413 if (code
== REFERENCE_TYPE
)
6414 return GEN_INT (reference_type_class
);
6415 if (code
== OFFSET_TYPE
)
6416 return GEN_INT (offset_type_class
);
6417 if (code
== REAL_TYPE
)
6418 return GEN_INT (real_type_class
);
6419 if (code
== COMPLEX_TYPE
)
6420 return GEN_INT (complex_type_class
);
6421 if (code
== FUNCTION_TYPE
)
6422 return GEN_INT (function_type_class
);
6423 if (code
== METHOD_TYPE
)
6424 return GEN_INT (method_type_class
);
6425 if (code
== RECORD_TYPE
)
6426 return GEN_INT (record_type_class
);
6427 if (code
== UNION_TYPE
)
6428 return GEN_INT (union_type_class
);
6429 if (code
== ARRAY_TYPE
)
6430 return GEN_INT (array_type_class
);
6431 if (code
== STRING_TYPE
)
6432 return GEN_INT (string_type_class
);
6433 if (code
== SET_TYPE
)
6434 return GEN_INT (set_type_class
);
6435 if (code
== FILE_TYPE
)
6436 return GEN_INT (file_type_class
);
6437 if (code
== LANG_TYPE
)
6438 return GEN_INT (lang_type_class
);
6440 return GEN_INT (no_type_class
);
6442 case BUILT_IN_CONSTANT_P
:
6446 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist
))) == 'c'
6447 ? const1_rtx
: const0_rtx
);
6449 case BUILT_IN_FRAME_ADDRESS
:
6450 /* The argument must be a nonnegative integer constant.
6451 It counts the number of frames to scan up the stack.
6452 The value is the address of that frame. */
6453 case BUILT_IN_RETURN_ADDRESS
:
6454 /* The argument must be a nonnegative integer constant.
6455 It counts the number of frames to scan up the stack.
6456 The value is the return address saved in that frame. */
6458 /* Warning about missing arg was already issued. */
6460 else if (TREE_CODE (TREE_VALUE (arglist
)) != INTEGER_CST
)
6462 error ("invalid arg to `__builtin_return_address'");
6465 else if (tree_int_cst_lt (TREE_VALUE (arglist
), integer_zero_node
))
6467 error ("invalid arg to `__builtin_return_address'");
6472 int count
= TREE_INT_CST_LOW (TREE_VALUE (arglist
));
6473 rtx tem
= frame_pointer_rtx
;
6476 /* Some machines need special handling before we can access arbitrary
6477 frames. For example, on the sparc, we must first flush all
6478 register windows to the stack. */
6479 #ifdef SETUP_FRAME_ADDRESSES
6480 SETUP_FRAME_ADDRESSES ();
6483 /* On the sparc, the return address is not in the frame, it is
6484 in a register. There is no way to access it off of the current
6485 frame pointer, but it can be accessed off the previous frame
6486 pointer by reading the value from the register window save
6488 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6489 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_RETURN_ADDRESS
)
6493 /* Scan back COUNT frames to the specified frame. */
6494 for (i
= 0; i
< count
; i
++)
6496 /* Assume the dynamic chain pointer is in the word that
6497 the frame address points to, unless otherwise specified. */
6498 #ifdef DYNAMIC_CHAIN_ADDRESS
6499 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
6501 tem
= memory_address (Pmode
, tem
);
6502 tem
= copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
6505 /* For __builtin_frame_address, return what we've got. */
6506 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
6509 /* For __builtin_return_address,
6510 Get the return address from that frame. */
6511 #ifdef RETURN_ADDR_RTX
6512 return RETURN_ADDR_RTX (count
, tem
);
6514 tem
= memory_address (Pmode
,
6515 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
6516 return copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
6520 case BUILT_IN_ALLOCA
:
6522 /* Arg could be non-integer if user redeclared this fcn wrong. */
6523 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
6525 current_function_calls_alloca
= 1;
6526 /* Compute the argument. */
6527 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
6529 /* Allocate the desired space. */
6530 target
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
6532 /* Record the new stack level for nonlocal gotos. */
6533 if (nonlocal_goto_handler_slot
!= 0)
6534 emit_stack_save (SAVE_NONLOCAL
, &nonlocal_goto_stack_level
, NULL_RTX
);
6538 /* If not optimizing, call the library function. */
6543 /* Arg could be non-integer if user redeclared this fcn wrong. */
6544 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
6547 /* Compute the argument. */
6548 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
6549 /* Compute ffs, into TARGET if possible.
6550 Set TARGET to wherever the result comes back. */
6551 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
6552 ffs_optab
, op0
, target
, 1);
6557 case BUILT_IN_STRLEN
:
6558 /* If not optimizing, call the library function. */
6563 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6564 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
6568 tree src
= TREE_VALUE (arglist
);
6569 tree len
= c_strlen (src
);
6572 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
6574 rtx result
, src_rtx
, char_rtx
;
6575 enum machine_mode insn_mode
= value_mode
, char_mode
;
6576 enum insn_code icode
;
6578 /* If the length is known, just return it. */
6580 return expand_expr (len
, target
, mode
, 0);
6582 /* If SRC is not a pointer type, don't do this operation inline. */
6586 /* Call a function if we can't compute strlen in the right mode. */
6588 while (insn_mode
!= VOIDmode
)
6590 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
6591 if (icode
!= CODE_FOR_nothing
)
6594 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
6596 if (insn_mode
== VOIDmode
)
6599 /* Make a place to write the result of the instruction. */
6602 && GET_CODE (result
) == REG
6603 && GET_MODE (result
) == insn_mode
6604 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
6605 result
= gen_reg_rtx (insn_mode
);
6607 /* Make sure the operands are acceptable to the predicates. */
6609 if (! (*insn_operand_predicate
[(int)icode
][0]) (result
, insn_mode
))
6610 result
= gen_reg_rtx (insn_mode
);
6612 src_rtx
= memory_address (BLKmode
,
6613 expand_expr (src
, NULL_RTX
, Pmode
,
6615 if (! (*insn_operand_predicate
[(int)icode
][1]) (src_rtx
, Pmode
))
6616 src_rtx
= copy_to_mode_reg (Pmode
, src_rtx
);
6618 char_rtx
= const0_rtx
;
6619 char_mode
= insn_operand_mode
[(int)icode
][2];
6620 if (! (*insn_operand_predicate
[(int)icode
][2]) (char_rtx
, char_mode
))
6621 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
6623 emit_insn (GEN_FCN (icode
) (result
,
6624 gen_rtx (MEM
, BLKmode
, src_rtx
),
6625 char_rtx
, GEN_INT (align
)));
6627 /* Return the value in the proper mode for this function. */
6628 if (GET_MODE (result
) == value_mode
)
6630 else if (target
!= 0)
6632 convert_move (target
, result
, 0);
6636 return convert_to_mode (value_mode
, result
, 0);
6639 case BUILT_IN_STRCPY
:
6640 /* If not optimizing, call the library function. */
6645 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6646 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
6647 || TREE_CHAIN (arglist
) == 0
6648 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
6652 tree len
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
6657 len
= size_binop (PLUS_EXPR
, len
, integer_one_node
);
6659 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
6663 case BUILT_IN_MEMCPY
:
6664 /* If not optimizing, call the library function. */
6669 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6670 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
6671 || TREE_CHAIN (arglist
) == 0
6672 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
6673 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
6674 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
6678 tree dest
= TREE_VALUE (arglist
);
6679 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
6680 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6683 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
6685 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
6686 rtx dest_rtx
, dest_mem
, src_mem
;
6688 /* If either SRC or DEST is not a pointer type, don't do
6689 this operation in-line. */
6690 if (src_align
== 0 || dest_align
== 0)
6692 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCPY
)
6693 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
6697 dest_rtx
= expand_expr (dest
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
6698 dest_mem
= gen_rtx (MEM
, BLKmode
,
6699 memory_address (BLKmode
, dest_rtx
));
6700 src_mem
= gen_rtx (MEM
, BLKmode
,
6701 memory_address (BLKmode
,
6702 expand_expr (src
, NULL_RTX
,
6706 /* Copy word part most expediently. */
6707 emit_block_move (dest_mem
, src_mem
,
6708 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
6709 MIN (src_align
, dest_align
));
6713 /* These comparison functions need an instruction that returns an actual
6714 index. An ordinary compare that just sets the condition codes
6716 #ifdef HAVE_cmpstrsi
6717 case BUILT_IN_STRCMP
:
6718 /* If not optimizing, call the library function. */
6723 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6724 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
6725 || TREE_CHAIN (arglist
) == 0
6726 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
6728 else if (!HAVE_cmpstrsi
)
6731 tree arg1
= TREE_VALUE (arglist
);
6732 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
6736 len
= c_strlen (arg1
);
6738 len
= size_binop (PLUS_EXPR
, integer_one_node
, len
);
6739 len2
= c_strlen (arg2
);
6741 len2
= size_binop (PLUS_EXPR
, integer_one_node
, len2
);
6743 /* If we don't have a constant length for the first, use the length
6744 of the second, if we know it. We don't require a constant for
6745 this case; some cost analysis could be done if both are available
6746 but neither is constant. For now, assume they're equally cheap.
6748 If both strings have constant lengths, use the smaller. This
6749 could arise if optimization results in strcpy being called with
6750 two fixed strings, or if the code was machine-generated. We should
6751 add some code to the `memcmp' handler below to deal with such
6752 situations, someday. */
6753 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
6760 else if (len2
&& TREE_CODE (len2
) == INTEGER_CST
)
6762 if (tree_int_cst_lt (len2
, len
))
6766 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
6770 case BUILT_IN_MEMCMP
:
6771 /* If not optimizing, call the library function. */
6776 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6777 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
6778 || TREE_CHAIN (arglist
) == 0
6779 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
6780 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
6781 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
6783 else if (!HAVE_cmpstrsi
)
6786 tree arg1
= TREE_VALUE (arglist
);
6787 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
6788 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6792 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
6794 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
6795 enum machine_mode insn_mode
6796 = insn_operand_mode
[(int) CODE_FOR_cmpstrsi
][0];
6798 /* If we don't have POINTER_TYPE, call the function. */
6799 if (arg1_align
== 0 || arg2_align
== 0)
6801 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCMP
)
6802 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
6806 /* Make a place to write the result of the instruction. */
6809 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
6810 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
6811 result
= gen_reg_rtx (insn_mode
);
6813 emit_insn (gen_cmpstrsi (result
,
6814 gen_rtx (MEM
, BLKmode
,
6815 expand_expr (arg1
, NULL_RTX
, Pmode
,
6817 gen_rtx (MEM
, BLKmode
,
6818 expand_expr (arg2
, NULL_RTX
, Pmode
,
6820 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
6821 GEN_INT (MIN (arg1_align
, arg2_align
))));
6823 /* Return the value in the proper mode for this function. */
6824 mode
= TYPE_MODE (TREE_TYPE (exp
));
6825 if (GET_MODE (result
) == mode
)
6827 else if (target
!= 0)
6829 convert_move (target
, result
, 0);
6833 return convert_to_mode (mode
, result
, 0);
6836 case BUILT_IN_STRCMP
:
6837 case BUILT_IN_MEMCMP
:
6841 default: /* just do library call, if unknown builtin */
6842 error ("built-in function `%s' not currently supported",
6843 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
6846 /* The switch statement above can drop through to cause the function
6847 to be called normally. */
6849 return expand_call (exp
, target
, ignore
);
6852 /* Built-in functions to perform an untyped call and return. */
6854 /* For each register that may be used for calling a function, this
6855 gives a mode used to copy the register's value. VOIDmode indicates
6856 the register is not used for calling a function. If the machine
6857 has register windows, this gives only the outbound registers.
6858 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_args_size () below; entries stay VOIDmode
   for registers that never carry arguments.  */
6859 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
6861 /* For each register that may be used for returning values, this gives
6862 a mode used to copy the register's value. VOIDmode indicates the
6863 register is not used for returning values. If the machine has
6864 register windows, this gives only the outbound registers.
6865 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_result_size () below.  */
6866 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
6868 /* Return the size required for the block returned by __builtin_apply_args,
6869 and initialize apply_args_mode. */
/* NOTE(review): this extraction dropped the function header, braces and
   several interior lines (the embedded numbering jumps, e.g. 6870-6872,
   6878-6879); the fragments below are the body of apply_args_size ().
   The static SIZE presumably caches the computed value across calls —
   TODO confirm against the dropped early-return lines.  */
6873 static int size
= -1;
6875 enum machine_mode mode
;
6877 /* The values computed by this function never change. */
6880 /* The first value is the incoming arg-pointer. */
6881 size
= GET_MODE_SIZE (Pmode
);
6883 /* The second value is the structure value address unless this is
6884 passed as an "invisible" first argument. */
6885 if (struct_value_rtx
)
6886 size
+= GET_MODE_SIZE (Pmode
);
/* For each hard register that can carry an argument, pick a mode to
   copy it with, then accumulate an aligned offset for it in the block.  */
6888 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
6889 if (FUNCTION_ARG_REGNO_P (regno
))
6891 /* Search for the proper mode for copying this register's
6892 value. I'm not sure this is right, but it works so far. */
6893 enum machine_mode best_mode
= VOIDmode
;
/* First try integer modes that fit in a single hard register.  */
6895 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
6897 mode
= GET_MODE_WIDER_MODE (mode
))
6898 if (HARD_REGNO_MODE_OK (regno
, mode
)
6899 && HARD_REGNO_NREGS (regno
, mode
) == 1)
/* Fall back to float modes that have a move pattern.  */
6902 if (best_mode
== VOIDmode
)
6903 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
6905 mode
= GET_MODE_WIDER_MODE (mode
))
6906 if (HARD_REGNO_MODE_OK (regno
, mode
)
6907 && (mov_optab
->handlers
[(int) mode
].insn_code
6908 != CODE_FOR_nothing
))
6912 if (mode
== VOIDmode
)
/* Round SIZE up to this mode's byte alignment before reserving room.  */
6915 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6916 if (size
% align
!= 0)
6917 size
= CEIL (size
, align
) * align
;
6918 size
+= GET_MODE_SIZE (mode
);
6919 apply_args_mode
[regno
] = mode
;
6922 apply_args_mode
[regno
] = VOIDmode
;
6927 /* Return the size required for the block returned by __builtin_apply,
6928 and initialize apply_result_mode. */
/* NOTE(review): extraction dropped braces and interior lines here too
   (embedded numbering jumps, e.g. 6931, 6937-6940).  Mirrors
   apply_args_size () but walks FUNCTION_VALUE_REGNO_P registers.  */
6930 apply_result_size ()
6932 static int size
= -1;
6934 enum machine_mode mode
;
6936 /* The values computed by this function never change. */
6941 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
6942 if (FUNCTION_VALUE_REGNO_P (regno
))
6944 /* Search for the proper mode for copying this register's
6945 value. I'm not sure this is right, but it works so far. */
6946 enum machine_mode best_mode
= VOIDmode
;
/* Integer modes first; note no HARD_REGNO_NREGS == 1 restriction here,
   unlike the argument-register scan in apply_args_size ().  */
6948 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
6950 mode
= GET_MODE_WIDER_MODE (mode
))
6951 if (HARD_REGNO_MODE_OK (regno
, mode
))
6954 if (best_mode
== VOIDmode
)
6955 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
6957 mode
= GET_MODE_WIDER_MODE (mode
))
6958 if (HARD_REGNO_MODE_OK (regno
, mode
)
6959 && (mov_optab
->handlers
[(int) mode
].insn_code
6960 != CODE_FOR_nothing
))
6964 if (mode
== VOIDmode
)
/* Align, then reserve room for this return register in the block.  */
6967 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
6968 if (size
% align
!= 0)
6969 size
= CEIL (size
, align
) * align
;
6970 size
+= GET_MODE_SIZE (mode
);
6971 apply_result_mode
[regno
] = mode
;
6974 apply_result_mode
[regno
] = VOIDmode
;
6976 /* Allow targets that use untyped_call and untyped_return to override
6977 the size so that machine-specific information can be stored here. */
6978 #ifdef APPLY_RESULT_SIZE
6979 size
= APPLY_RESULT_SIZE
;
6985 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6986 /* Create a vector describing the result block RESULT. If SAVEP is true,
6987 the result block is used to save the values; otherwise it is used to
6988 restore the values. */
/* NOTE(review): extraction dropped the return-type/brace lines and the
   initializers of SIZE and NELTS (numbering jumps 6991-6993, 6998-6999);
   presumably both start at 0 — TODO confirm.  Builds a PARALLEL of SETs,
   one per live entry of apply_result_mode.  */
6990 result_vector (savep
, result
)
6994 int regno
, size
, align
, nelts
;
6995 enum machine_mode mode
;
6997 rtx
*savevec
= (rtx
*) alloca (FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
6997 /* (scratch vector large enough for every hard register) */
7000 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
7001 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
/* Same alignment walk as apply_result_size (), so offsets agree.  */
7003 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7004 if (size
% align
!= 0)
7005 size
= CEIL (size
, align
) * align
;
/* Saving copies from the incoming register; restoring writes outbound.  */
7006 reg
= gen_rtx (REG
, mode
, savep
? INCOMING_REGNO (regno
) : regno
);
7007 mem
= change_address (result
, mode
,
7008 plus_constant (XEXP (result
, 0), size
));
7009 savevec
[nelts
++] = (savep
7010 ? gen_rtx (SET
, VOIDmode
, mem
, reg
)
7011 : gen_rtx (SET
, VOIDmode
, reg
, mem
));
7012 size
+= GET_MODE_SIZE (mode
);
7014 return gen_rtx (PARALLEL
, VOIDmode
, gen_rtvec_v (nelts
, savevec
));
7016 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7019 /* Save the state required to perform an untyped call with the same
7020 arguments as were passed to the current function. */
/* NOTE(review): extraction dropped the return-type/brace lines and the
   constant offsets passed to plus_constant (numbering jumps 7046, 7062);
   layout must match apply_args_size () and expand_builtin_apply ().
   Returns the address of the saved-state block.  */
7022 expand_builtin_apply_args ()
7025 int size
, align
, regno
;
7026 enum machine_mode mode
;
7028 /* Create a block where the arg-pointer, structure value address,
7029 and argument registers can be saved. */
7030 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
7032 /* Walk past the arg-pointer and structure value address. */
7033 size
= GET_MODE_SIZE (Pmode
);
7034 if (struct_value_rtx
)
7035 size
+= GET_MODE_SIZE (Pmode
);
7037 /* Save each register used in calling a function to the block. */
7038 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
7039 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
7041 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7042 if (size
% align
!= 0)
7043 size
= CEIL (size
, align
) * align
;
7044 emit_move_insn (change_address (registers
, mode
,
7045 plus_constant (XEXP (registers
, 0),
7047 gen_rtx (REG
, mode
, INCOMING_REGNO (regno
)));
7048 size
+= GET_MODE_SIZE (mode
);
7051 /* Save the arg pointer to the block. */
7052 emit_move_insn (change_address (registers
, Pmode
, XEXP (registers
, 0)),
7053 copy_to_reg (virtual_incoming_args_rtx
));
7054 size
= GET_MODE_SIZE (Pmode
);
7056 /* Save the structure value address unless this is passed as an
7057 "invisible" first argument. */
7058 if (struct_value_incoming_rtx
)
7060 emit_move_insn (change_address (registers
, Pmode
,
7061 plus_constant (XEXP (registers
, 0),
7063 copy_to_reg (struct_value_incoming_rtx
));
7064 size
+= GET_MODE_SIZE (Pmode
);
7067 /* Return the address of the block. */
7068 return copy_addr_to_reg (XEXP (registers
, 0));
7071 /* Perform an untyped call and save the state required to perform an
7072 untyped return of whatever value was returned by the given function. */
/* NOTE(review): extraction dropped the return-type/brace lines and many
   interior lines (e.g. the USE-insn sequence setup around 7136-7141 and
   the offsets in plus_constant calls).  FUNCTION is the callee address,
   ARGUMENTS the block built by expand_builtin_apply_args, ARGSIZE its
   byte size.  Returns the address of the saved-result block.  */
7074 expand_builtin_apply (function
, arguments
, argsize
)
7075 rtx function
, arguments
, argsize
;
7077 int size
, align
, regno
;
7078 enum machine_mode mode
;
7079 rtx incoming_args
, result
, reg
, dest
, call_insn
;
7080 rtx old_stack_level
= 0;
7083 /* Create a block where the return registers can be saved. */
7084 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
7086 /* ??? The argsize value should be adjusted here. */
7088 /* Fetch the arg pointer from the ARGUMENTS block. */
7089 incoming_args
= gen_reg_rtx (Pmode
);
7090 emit_move_insn (incoming_args
,
7091 gen_rtx (MEM
, Pmode
, arguments
));
7092 #ifndef STACK_GROWS_DOWNWARD
/* With an upward-growing stack the saved arg pointer marks the low end,
   so step past the block.  */
7093 incoming_args
= expand_binop (Pmode
, add_optab
, incoming_args
, argsize
,
7094 incoming_args
, 0, OPTAB_LIB_WIDEN
);
7097 /* Perform postincrements before actually calling the function. */
7100 /* Push a new argument block and copy the arguments. */
7101 do_pending_stack_adjust ();
7102 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
7104 /* Push a block of memory onto the stack to store the memory arguments.
7105 Save the address in a register, and copy the memory arguments. ??? I
7106 haven't figured out how the calling convention macros effect this,
7107 but it's likely that the source and/or destination addresses in
7108 the block copy will need updating in machine specific ways. */
7109 dest
= copy_addr_to_reg (push_block (argsize
, 0, 0));
7110 emit_block_move (gen_rtx (MEM
, BLKmode
, dest
),
7111 gen_rtx (MEM
, BLKmode
, incoming_args
),
7113 PARM_BOUNDARY
/ BITS_PER_UNIT
);
7115 /* Refer to the argument block. */
7117 arguments
= gen_rtx (MEM
, BLKmode
, arguments
);
7119 /* Walk past the arg-pointer and structure value address. */
7120 size
= GET_MODE_SIZE (Pmode
);
7121 if (struct_value_rtx
)
7122 size
+= GET_MODE_SIZE (Pmode
);
7124 /* Restore each of the registers previously saved. Make USE insns
7125 for each of these registers for use in making the call. */
7126 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
7127 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
7129 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7130 if (size
% align
!= 0)
7131 size
= CEIL (size
, align
) * align
;
7132 reg
= gen_rtx (REG
, mode
, regno
);
7133 emit_move_insn (reg
,
7134 change_address (arguments
, mode
,
7135 plus_constant (XEXP (arguments
, 0),
/* Accumulate the USE insns on a separate sequence; they are inserted
   just before the CALL insn below.  */
7138 push_to_sequence (use_insns
);
7139 emit_insn (gen_rtx (USE
, VOIDmode
, reg
));
7140 use_insns
= get_insns ();
7142 size
+= GET_MODE_SIZE (mode
);
7145 /* Restore the structure value address unless this is passed as an
7146 "invisible" first argument. */
7147 size
= GET_MODE_SIZE (Pmode
);
7148 if (struct_value_rtx
)
7150 rtx value
= gen_reg_rtx (Pmode
);
7151 emit_move_insn (value
,
7152 change_address (arguments
, Pmode
,
7153 plus_constant (XEXP (arguments
, 0),
7155 emit_move_insn (struct_value_rtx
, value
);
7156 if (GET_CODE (struct_value_rtx
) == REG
)
7158 push_to_sequence (use_insns
);
7159 emit_insn (gen_rtx (USE
, VOIDmode
, struct_value_rtx
));
7160 use_insns
= get_insns ();
7163 size
+= GET_MODE_SIZE (Pmode
);
7166 /* All arguments and registers used for the call are set up by now! */
7167 function
= prepare_call_address (function
, NULL_TREE
, &use_insns
);
7169 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7170 and we don't want to load it into a register as an optimization,
7171 because prepare_call_address already did it if it should be done. */
7172 if (GET_CODE (function
) != SYMBOL_REF
)
7173 function
= memory_address (FUNCTION_MODE
, function
);
7175 /* Generate the actual call instruction and save the return value. */
7176 #ifdef HAVE_untyped_call
7177 if (HAVE_untyped_call
)
7178 emit_call_insn (gen_untyped_call (gen_rtx (MEM
, FUNCTION_MODE
, function
),
7179 result
, result_vector (1, result
)));
7182 #ifdef HAVE_call_value
7183 if (HAVE_call_value
)
7187 /* Locate the unique return register. It is not possible to
7188 express a call that sets more than one return register using
7189 call_value; use untyped_call for that. In fact, untyped_call
7190 only needs to save the return registers in the given block. */
7191 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
7192 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
7195 abort (); /* HAVE_untyped_call required. */
7196 valreg
= gen_rtx (REG
, mode
, regno
);
7199 emit_call_insn (gen_call_value (valreg
,
7200 gen_rtx (MEM
, FUNCTION_MODE
, function
),
7201 const0_rtx
, NULL_RTX
, const0_rtx
));
7203 emit_move_insn (change_address (result
, GET_MODE (valreg
),
7211 /* Find the CALL insn we just emitted and write the USE insns before it. */
7212 for (call_insn
= get_last_insn ();
7213 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
7214 call_insn
= PREV_INSN (call_insn
))
7220 /* Put the USE insns before the CALL. */
7221 emit_insns_before (use_insns
, call_insn
);
7223 /* Restore the stack. */
7224 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
7226 /* Return the address of the result block. */
7227 return copy_addr_to_reg (XEXP (result
, 0));
7230 /* Perform an untyped return. */
/* NOTE(review): extraction dropped the return-type/brace lines, RESULT's
   parameter declaration, and some interior lines (numbering jumps
   7233-7234, 7237-7239).  RESULT is the address of a block saved by
   expand_builtin_apply; the function reloads the return registers from
   it and performs a return.  */
7232 expand_builtin_return (result
)
7235 int size
, align
, regno
;
7236 enum machine_mode mode
;
/* Ensure apply_result_mode[] is initialized before it is read below.  */
7240 apply_result_size ();
7241 result
= gen_rtx (MEM
, BLKmode
, result
);
7243 #ifdef HAVE_untyped_return
7244 if (HAVE_untyped_return
)
7246 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
7252 /* Restore the return value and note that each value is used. */
7254 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
7255 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
7257 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
7258 if (size
% align
!= 0)
7259 size
= CEIL (size
, align
) * align
;
7260 reg
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
7261 emit_move_insn (reg
,
7262 change_address (result
, mode
,
7263 plus_constant (XEXP (result
, 0),
7266 push_to_sequence (use_insns
);
7267 emit_insn (gen_rtx (USE
, VOIDmode
, reg
));
7268 use_insns
= get_insns ();
7270 size
+= GET_MODE_SIZE (mode
);
7273 /* Put the USE insns before the return. */
7274 emit_insns (use_insns
);
7276 /* Return whatever values was restored by jumping directly to the end
7278 expand_null_return ();
7281 /* Expand code for a post- or pre- increment or decrement
7282 and return the RTX for the result.
7283 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): extraction dropped the return-type/brace lines and some
   interior lines (numbering jumps, e.g. 7287-7289, 7300, 7350-7351).
   EXP is the {PRE,POST}{INC,DEC}REMENT_EXPR tree.  */
7286 expand_increment (exp
, post
)
7290 register rtx op0
, op1
;
7291 register rtx temp
, value
;
7292 register tree incremented
= TREE_OPERAND (exp
, 0);
7293 optab this_optab
= add_optab
;
7295 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
7296 int op0_is_copy
= 0;
7298 /* Stabilize any component ref that might need to be
7299 evaluated more than once below. */
7301 || TREE_CODE (incremented
) == BIT_FIELD_REF
7302 || (TREE_CODE (incremented
) == COMPONENT_REF
7303 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
7304 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
7305 incremented
= stabilize_reference (incremented
);
7307 /* Compute the operands as RTX.
7308 Note whether OP0 is the actual lvalue or a copy of it:
7309 I believe it is a copy iff it is a register or subreg
7310 and insns were generated in computing it. */
7312 temp
= get_last_insn ();
7313 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
7315 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7316 in place but intead must do sign- or zero-extension during assignment,
7317 so we copy it into a new register and let the code below use it as
7320 Note that we can safely modify this SUBREG since it is know not to be
7321 shared (it was made by the expand_expr call above). */
7323 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
7324 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
/* OP0 is a copy iff expanding it emitted insns and it landed in a
   (sub)register — see the comment above.  */
7326 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
7327 && temp
!= get_last_insn ());
7328 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7330 /* Decide whether incrementing or decrementing. */
7331 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
7332 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
7333 this_optab
= sub_optab
;
7335 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7336 then we cannot just increment OP0. We must therefore contrive to
7337 increment the original value. Then, for postincrement, we can return
7338 OP0 since it is a copy of the old value. For preincrement, we want
7339 to always expand here, since this generates better or equivalent code. */
7340 if (!post
|| op0_is_copy
)
7342 /* This is the easiest way to increment the value wherever it is.
7343 Problems with multiple evaluation of INCREMENTED are prevented
7344 because either (1) it is a component_ref or preincrement,
7345 in which case it was stabilized above, or (2) it is an array_ref
7346 with constant index in an array in a register, which is
7347 safe to reevaluate. */
7348 tree newexp
= build ((this_optab
== add_optab
7349 ? PLUS_EXPR
: MINUS_EXPR
),
7352 TREE_OPERAND (exp
, 1));
7353 temp
= expand_assignment (incremented
, newexp
, ! post
, 0);
7354 return post
? op0
: temp
;
7357 /* Convert decrement by a constant into a negative increment. */
7358 if (this_optab
== sub_optab
7359 && GET_CODE (op1
) == CONST_INT
)
7361 op1
= GEN_INT (- INTVAL (op1
));
7362 this_optab
= add_optab
;
7367 /* We have a true reference to the value in OP0.
7368 If there is an insn to add or subtract in this mode, queue it. */
7370 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
7371 op0
= stabilize (op0
);
7374 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
7375 if (icode
!= (int) CODE_FOR_nothing
7376 /* Make sure that OP0 is valid for operands 0 and 1
7377 of the insn we want to queue. */
7378 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
7379 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
))
7381 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
7382 op1
= force_reg (mode
, op1
);
7384 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
7388 /* Preincrement, or we can't increment with one simple insn. */
7390 /* Save a copy of the value before inc or dec, to return it later. */
7391 temp
= value
= copy_to_reg (op0
);
7393 /* Arrange to return the incremented value. */
7394 /* Copy the rtx because expand_binop will protect from the queue,
7395 and the results of that would be invalid for us to return
7396 if our caller does emit_queue before using our result. */
7397 temp
= copy_rtx (value
= op0
);
7399 /* Increment however we can. */
7400 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
7401 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
7402 /* Make sure the value is stored into OP0. */
7404 emit_move_insn (op0
, op1
);
7409 /* Expand all function calls contained within EXP, innermost ones first.
7410 But don't look within expressions that have sequence points.
7411 For each CALL_EXPR, record the rtx for its value
7412 in the CALL_EXPR_RTL field. */
/* NOTE(review): extraction dropped the braces, several case labels
   (e.g. the CALL_EXPR label before 7432 and the labels before 7461),
   and the `return`/`break` lines — the numbering jumps show where.  */
7415 preexpand_calls (exp
)
7418 register int nops
, i
;
7419 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
7421 if (! do_preexpand_calls
)
7424 /* Only expressions and references can contain calls. */
7426 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
7429 switch (TREE_CODE (exp
))
7432 /* Do nothing if already expanded. */
7433 if (CALL_EXPR_RTL (exp
) != 0)
7436 /* Do nothing to built-in functions. */
7437 if (TREE_CODE (TREE_OPERAND (exp
, 0)) != ADDR_EXPR
7438 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != FUNCTION_DECL
7439 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
7440 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
7445 case TRUTH_ANDIF_EXPR
:
7446 case TRUTH_ORIF_EXPR
:
7447 /* If we find one of these, then we can be sure
7448 the adjust will be done for it (since it makes jumps).
7449 Do it now, so that if this is inside an argument
7450 of a function, we don't get the stack adjustment
7451 after some other args have already been pushed. */
7452 do_pending_stack_adjust ();
7457 case WITH_CLEANUP_EXPR
:
7461 if (SAVE_EXPR_RTL (exp
) != 0)
/* Default: recurse into the operands of any other expression node.  */
7465 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
7466 for (i
= 0; i
< nops
; i
++)
7467 if (TREE_OPERAND (exp
, i
) != 0)
7469 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
7470 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
7472 preexpand_calls (TREE_OPERAND (exp
, i
));
7476 /* At the start of a function, record that we have no previously-pushed
7477 arguments waiting to be popped. */
7480 init_pending_stack_adjust ()
7482 pending_stack_adjust
= 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Only do this when the stack is known to be ignored on exit and the
   function will not be inlined anywhere (inlined copies must still pop
   their arguments).  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
7499 /* Pop any previously-pushed arguments that have not been popped yet. */
7502 do_pending_stack_adjust ()
7504 if (inhibit_defer_pop
== 0)
7506 if (pending_stack_adjust
!= 0)
7507 adjust_stack (GEN_INT (pending_stack_adjust
));
7508 pending_stack_adjust
= 0;
7512 /* Expand all cleanups up to OLD_CLEANUPS.
7513 Needed here, and also for language-dependent calls. */
7516 expand_cleanups_to (old_cleanups
)
7519 while (cleanups_this_call
!= old_cleanups
)
7521 expand_expr (TREE_VALUE (cleanups_this_call
), NULL_RTX
, VOIDmode
, 0);
7522 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
7526 /* Expand conditional expressions. */
7528 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7529 LABEL is an rtx of code CODE_LABEL, in this function and all the
7533 jumpifnot (exp
, label
)
7537 do_jump (exp
, label
, NULL_RTX
);
7540 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): the function header line was dropped by this extraction
   (numbering jumps 7541-7546); this is presumably the body of the
   companion to jumpifnot above — TODO confirm the name against the
   original file.  */
7547 do_jump (exp
, NULL_RTX
, label
);
7550 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7551 the result is zero, or IF_TRUE_LABEL if the result is one.
7552 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7553 meaning fall through in that case.
7555 do_jump always does any pending stack adjust except when it does not
7556 actually perform a jump. An example where there is no jump
7557 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7559 This function is responsible for optimizing cases such as
7560 &&, || and comparison operators in EXP. */
/* NOTE(review): extraction dropped the braces, most `case`/`break`
   lines, and several statements (see the jumps in the embedded
   numbering).  The fragments below are the switch body of do_jump.  */
7563 do_jump (exp
, if_false_label
, if_true_label
)
7565 rtx if_false_label
, if_true_label
;
7567 register enum tree_code code
= TREE_CODE (exp
);
7568 /* Some cases need to create a label to jump to
7569 in order to properly fall through.
7570 These cases set DROP_THROUGH_LABEL nonzero. */
7571 rtx drop_through_label
= 0;
/* Constant operand: the jump target is known at compile time.  */
7585 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
7591 /* This is not true with #pragma weak */
7593 /* The address of something can never be zero. */
7595 emit_jump (if_true_label
);
7600 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
7601 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
7602 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
7605 /* If we are narrowing the operand, we have to do the compare in the
7607 if ((TYPE_PRECISION (TREE_TYPE (exp
))
7608 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7610 case NON_LVALUE_EXPR
:
7611 case REFERENCE_EXPR
:
7616 /* These cannot change zero->non-zero or vice versa. */
7617 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
7621 /* This is never less insns than evaluating the PLUS_EXPR followed by
7622 a test and can be longer if the test is eliminated. */
7624 /* Reduce to minus. */
7625 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
7626 TREE_OPERAND (exp
, 0),
7627 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
7628 TREE_OPERAND (exp
, 1))));
7629 /* Process as MINUS. */
7633 /* Non-zero iff operands of minus differ. */
7634 comparison
= compare (build (NE_EXPR
, TREE_TYPE (exp
),
7635 TREE_OPERAND (exp
, 0),
7636 TREE_OPERAND (exp
, 1)),
7641 /* If we are AND'ing with a small constant, do this comparison in the
7642 smallest type that fits. If the machine doesn't have comparisons
7643 that small, it will be converted back to the wider comparison.
7644 This helps if we are testing the sign bit of a narrower object.
7645 combine can't do this for us because it can't know whether a
7646 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7648 if (! SLOW_BYTE_ACCESS
7649 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
7650 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
7651 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
7652 && (type
= type_for_size (i
+ 1, 1)) != 0
7653 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
7654 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
7655 != CODE_FOR_nothing
))
7657 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
7662 case TRUTH_NOT_EXPR
:
/* Logical NOT: just swap the two labels.  */
7663 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
7666 case TRUTH_ANDIF_EXPR
:
7667 if (if_false_label
== 0)
7668 if_false_label
= drop_through_label
= gen_label_rtx ();
7669 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
7670 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
7673 case TRUTH_ORIF_EXPR
:
7674 if (if_true_label
== 0)
7675 if_true_label
= drop_through_label
= gen_label_rtx ();
7676 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
7677 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
/* COMPOUND_EXPR: evaluate the first operand for effect, jump on the
   second.  */
7681 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
7684 do_pending_stack_adjust ();
7685 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
7692 int bitsize
, bitpos
, unsignedp
;
7693 enum machine_mode mode
;
7698 /* Get description of this reference. We don't actually care
7699 about the underlying object here. */
7700 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7701 &mode
, &unsignedp
, &volatilep
);
7703 type
= type_for_size (bitsize
, unsignedp
);
7704 if (! SLOW_BYTE_ACCESS
7705 && type
!= 0 && bitsize
>= 0
7706 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
7707 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
7708 != CODE_FOR_nothing
))
7710 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
7717 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7718 if (integer_onep (TREE_OPERAND (exp
, 1))
7719 && integer_zerop (TREE_OPERAND (exp
, 2)))
7720 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
7722 else if (integer_zerop (TREE_OPERAND (exp
, 1))
7723 && integer_onep (TREE_OPERAND (exp
, 2)))
7724 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
/* General conditional: branch on the condition, then jump through
   each arm, falling through via DROP_THROUGH_LABEL when a label
   is absent.  */
7728 register rtx label1
= gen_label_rtx ();
7729 drop_through_label
= gen_label_rtx ();
7730 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
7731 /* Now the THEN-expression. */
7732 do_jump (TREE_OPERAND (exp
, 1),
7733 if_false_label
? if_false_label
: drop_through_label
,
7734 if_true_label
? if_true_label
: drop_through_label
);
7735 /* In case the do_jump just above never jumps. */
7736 do_pending_stack_adjust ();
7737 emit_label (label1
);
7738 /* Now the ELSE-expression. */
7739 do_jump (TREE_OPERAND (exp
, 2),
7740 if_false_label
? if_false_label
: drop_through_label
,
7741 if_true_label
? if_true_label
: drop_through_label
);
/* EQ_EXPR: compare against zero reduces to a plain truth jump with
   labels swapped; multiword integer compares go by parts.  */
7746 if (integer_zerop (TREE_OPERAND (exp
, 1)))
7747 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
7748 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7751 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7752 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
7754 comparison
= compare (exp
, EQ
, EQ
);
7758 if (integer_zerop (TREE_OPERAND (exp
, 1)))
7759 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
7760 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7763 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7764 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
7766 comparison
= compare (exp
, NE
, NE
);
/* Ordering comparisons: second `compare' argument is the unsigned
   RTL code used when the operands are unsigned.  */
7770 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7772 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7773 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
7775 comparison
= compare (exp
, LT
, LTU
);
7779 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7781 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7782 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
7784 comparison
= compare (exp
, LE
, LEU
);
7788 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7790 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7791 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
7793 comparison
= compare (exp
, GT
, GTU
);
7797 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7799 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7800 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
7802 comparison
= compare (exp
, GE
, GEU
);
/* Default case: evaluate EXP and compare it against zero.  */
7807 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
7809 /* This is not needed any more and causes poor code since it causes
7810 comparisons and tests from non-SI objects to have different code
7812 /* Copy to register to avoid generating bad insns by cse
7813 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7814 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
7815 temp
= copy_to_reg (temp
);
7817 do_pending_stack_adjust ();
7818 if (GET_CODE (temp
) == CONST_INT
)
7819 comparison
= (temp
== const0_rtx
? const0_rtx
: const_true_rtx
);
7820 else if (GET_CODE (temp
) == LABEL_REF
)
7821 comparison
= const_true_rtx
;
7822 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
7823 && !can_compare_p (GET_MODE (temp
)))
7824 /* Note swapping the labels gives us not-equal. */
7825 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
7826 else if (GET_MODE (temp
) != VOIDmode
)
7827 comparison
= compare_from_rtx (temp
, CONST0_RTX (GET_MODE (temp
)),
7828 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
7829 GET_MODE (temp
), NULL_RTX
, 0);
7834 /* Do any postincrements in the expression that was tested. */
7837 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7838 straight into a conditional jump instruction as the jump condition.
7839 Otherwise, all the work has been done already. */
7841 if (comparison
== const_true_rtx
)
7844 emit_jump (if_true_label
);
7846 else if (comparison
== const0_rtx
)
7849 emit_jump (if_false_label
);
7851 else if (comparison
)
7852 do_jump_for_compare (comparison
, if_false_label
, if_true_label
);
7856 if (drop_through_label
)
7858 /* If do_jump produces code that might be jumped around,
7859 do any stack adjusts from that code, before the place
7860 where control merges in. */
7861 do_pending_stack_adjust ();
7862 emit_label (drop_through_label
);
7866 /* Given a comparison expression EXP for values too wide to be compared
7867 with one insn, test the comparison and jump to the appropriate label.
7868 The code of EXP is ignored; we always test GT if SWAP is 0,
7869 and LT if SWAP is 1. */
7872 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
7875 rtx if_false_label
, if_true_label
;
7877 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
7878 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
7879 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7880 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
7881 rtx drop_through_label
= 0;
7882 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7885 if (! if_true_label
|| ! if_false_label
)
7886 drop_through_label
= gen_label_rtx ();
7887 if (! if_true_label
)
7888 if_true_label
= drop_through_label
;
7889 if (! if_false_label
)
7890 if_false_label
= drop_through_label
;
7892 /* Compare a word at a time, high order first. */
7893 for (i
= 0; i
< nwords
; i
++)
7896 rtx op0_word
, op1_word
;
7898 if (WORDS_BIG_ENDIAN
)
7900 op0_word
= operand_subword_force (op0
, i
, mode
);
7901 op1_word
= operand_subword_force (op1
, i
, mode
);
7905 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
7906 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
7909 /* All but high-order word must be compared as unsigned. */
7910 comp
= compare_from_rtx (op0_word
, op1_word
,
7911 (unsignedp
|| i
> 0) ? GTU
: GT
,
7912 unsignedp
, word_mode
, NULL_RTX
, 0);
7913 if (comp
== const_true_rtx
)
7914 emit_jump (if_true_label
);
7915 else if (comp
!= const0_rtx
)
7916 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
7918 /* Consider lower words only if these are equal. */
7919 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
7921 if (comp
== const_true_rtx
)
7922 emit_jump (if_false_label
);
7923 else if (comp
!= const0_rtx
)
7924 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
7928 emit_jump (if_false_label
);
7929 if (drop_through_label
)
7930 emit_label (drop_through_label
);
7933 /* Given an EQ_EXPR expression EXP for values too wide to be compared
7934 with one insn, test the comparison and jump to the appropriate label. */
7937 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
7939 rtx if_false_label
, if_true_label
;
7941 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
7942 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7943 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7944 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
7946 rtx drop_through_label
= 0;
7948 if (! if_false_label
)
7949 drop_through_label
= if_false_label
= gen_label_rtx ();
7951 for (i
= 0; i
< nwords
; i
++)
7953 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
, mode
),
7954 operand_subword_force (op1
, i
, mode
),
7955 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
7956 word_mode
, NULL_RTX
, 0);
7957 if (comp
== const_true_rtx
)
7958 emit_jump (if_false_label
);
7959 else if (comp
!= const0_rtx
)
7960 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
7964 emit_jump (if_true_label
);
7965 if (drop_through_label
)
7966 emit_label (drop_through_label
);
7969 /* Jump according to whether OP0 is 0.
7970 We assume that OP0 has an integer mode that is too wide
7971 for the available compare insns. */
7974 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
7976 rtx if_false_label
, if_true_label
;
7978 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
7980 rtx drop_through_label
= 0;
7982 if (! if_false_label
)
7983 drop_through_label
= if_false_label
= gen_label_rtx ();
7985 for (i
= 0; i
< nwords
; i
++)
7987 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
,
7989 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0);
7990 if (comp
== const_true_rtx
)
7991 emit_jump (if_false_label
);
7992 else if (comp
!= const0_rtx
)
7993 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
7997 emit_jump (if_true_label
);
7998 if (drop_through_label
)
7999 emit_label (drop_through_label
);
8002 /* Given a comparison expression in rtl form, output conditional branches to
8003 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8006 do_jump_for_compare (comparison
, if_false_label
, if_true_label
)
8007 rtx comparison
, if_false_label
, if_true_label
;
8011 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
8012 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_true_label
));
8017 emit_jump (if_false_label
);
8019 else if (if_false_label
)
8022 rtx prev
= PREV_INSN (get_last_insn ());
8025 /* Output the branch with the opposite condition. Then try to invert
8026 what is generated. If more than one insn is a branch, or if the
8027 branch is not the last insn written, abort. If we can't invert
8028 the branch, emit make a true label, redirect this jump to that,
8029 emit a jump to the false label and define the true label. */
8031 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
8032 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_false_label
));
8036 /* Here we get the insn before what was just emitted.
8037 On some machines, emitting the branch can discard
8038 the previous compare insn and emit a replacement. */
8040 /* If there's only one preceding insn... */
8041 insn
= get_insns ();
8043 insn
= NEXT_INSN (prev
);
8045 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
8046 if (GET_CODE (insn
) == JUMP_INSN
)
8053 if (branch
!= get_last_insn ())
8056 if (! invert_jump (branch
, if_false_label
))
8058 if_true_label
= gen_label_rtx ();
8059 redirect_jump (branch
, if_true_label
);
8060 emit_jump (if_false_label
);
8061 emit_label (if_true_label
);
8066 /* Generate code for a comparison expression EXP
8067 (including code to compute the values to be compared)
8068 and set (CC0) according to the result.
8069 SIGNED_CODE should be the rtx operation for this comparison for
8070 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8072 We force a stack adjustment unless there are currently
8073 things pushed on the stack that aren't yet used. */
8076 compare (exp
, signed_code
, unsigned_code
)
8078 enum rtx_code signed_code
, unsigned_code
;
8081 = expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
8083 = expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
8084 register tree type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8085 register enum machine_mode mode
= TYPE_MODE (type
);
8086 int unsignedp
= TREE_UNSIGNED (type
);
8087 enum rtx_code code
= unsignedp
? unsigned_code
: signed_code
;
8089 return compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
,
8091 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
8092 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
8095 /* Like compare but expects the values to compare as two rtx's.
8096 The decision as to signed or unsigned comparison must be made by the caller.
8098 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8101 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8102 size of MODE should be used. */
8105 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
8106 register rtx op0
, op1
;
8109 enum machine_mode mode
;
8115 /* If one operand is constant, make it the second one. Only do this
8116 if the other operand is not constant as well. */
8118 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
8119 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
8124 code
= swap_condition (code
);
8129 op0
= force_not_mem (op0
);
8130 op1
= force_not_mem (op1
);
8133 do_pending_stack_adjust ();
8135 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
8136 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
8140 /* There's no need to do this now that combine.c can eliminate lots of
8141 sign extensions. This can be less efficient in certain cases on other
8144 /* If this is a signed equality comparison, we can do it as an
8145 unsigned comparison since zero-extension is cheaper than sign
8146 extension and comparisons with zero are done as unsigned. This is
8147 the case even on machines that can do fast sign extension, since
8148 zero-extension is easier to combine with other operations than
8149 sign-extension is. If we are comparing against a constant, we must
8150 convert it to what it would look like unsigned. */
8151 if ((code
== EQ
|| code
== NE
) && ! unsignedp
8152 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
8154 if (GET_CODE (op1
) == CONST_INT
8155 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
8156 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
8161 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
8163 return gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
8166 /* Generate code to calculate EXP using a store-flag instruction
8167 and return an rtx for the result. EXP is either a comparison
8168 or a TRUTH_NOT_EXPR whose operand is a comparison.
8170 If TARGET is nonzero, store the result there if convenient.
8172 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8175 Return zero if there is no suitable set-flag instruction
8176 available on this machine.
8178 Once expand_expr has been called on the arguments of the comparison,
8179 we are committed to doing the store flag, since it is not safe to
8180 re-evaluate the expression. We emit the store-flag insn by calling
8181 emit_store_flag, but only expand the arguments if we have a reason
8182 to believe that emit_store_flag will be successful. If we think that
8183 it will, but it isn't, we have to simulate the store-flag with a
8184 set/jump/set sequence. */
8187 do_store_flag (exp
, target
, mode
, only_cheap
)
8190 enum machine_mode mode
;
8194 tree arg0
, arg1
, type
;
8196 enum machine_mode operand_mode
;
8200 enum insn_code icode
;
8201 rtx subtarget
= target
;
8202 rtx result
, label
, pattern
, jump_pat
;
8204 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8205 result at the end. We can't simply invert the test since it would
8206 have already been inverted if it were valid. This case occurs for
8207 some floating-point comparisons. */
8209 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
8210 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
8212 arg0
= TREE_OPERAND (exp
, 0);
8213 arg1
= TREE_OPERAND (exp
, 1);
8214 type
= TREE_TYPE (arg0
);
8215 operand_mode
= TYPE_MODE (type
);
8216 unsignedp
= TREE_UNSIGNED (type
);
8218 /* We won't bother with BLKmode store-flag operations because it would mean
8219 passing a lot of information to emit_store_flag. */
8220 if (operand_mode
== BLKmode
)
8226 /* Get the rtx comparison code to use. We know that EXP is a comparison
8227 operation of some type. Some comparisons against 1 and -1 can be
8228 converted to comparisons with zero. Do so here so that the tests
8229 below will be aware that we have a comparison with zero. These
8230 tests will not catch constants in the first operand, but constants
8231 are rarely passed as the first operand. */
8233 switch (TREE_CODE (exp
))
8242 if (integer_onep (arg1
))
8243 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
8245 code
= unsignedp
? LTU
: LT
;
8248 if (integer_all_onesp (arg1
))
8249 arg1
= integer_zero_node
, code
= unsignedp
? LTU
: LT
;
8251 code
= unsignedp
? LEU
: LE
;
8254 if (integer_all_onesp (arg1
))
8255 arg1
= integer_zero_node
, code
= unsignedp
? GEU
: GE
;
8257 code
= unsignedp
? GTU
: GT
;
8260 if (integer_onep (arg1
))
8261 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
8263 code
= unsignedp
? GEU
: GE
;
8269 /* Put a constant second. */
8270 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
8272 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
8273 code
= swap_condition (code
);
8276 /* If this is an equality or inequality test of a single bit, we can
8277 do this by shifting the bit being tested to the low-order bit and
8278 masking the result with the constant 1. If the condition was EQ,
8279 we xor it with 1. This does not require an scc insn and is faster
8280 than an scc insn even if we have it. */
8282 if ((code
== NE
|| code
== EQ
)
8283 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
8284 && integer_pow2p (TREE_OPERAND (arg0
, 1))
8285 && TYPE_PRECISION (type
) <= HOST_BITS_PER_WIDE_INT
)
8287 int bitnum
= exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0
, 1),
8288 NULL_RTX
, VOIDmode
, 0)));
8290 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
8291 || GET_MODE (subtarget
) != operand_mode
8292 || ! safe_from_p (subtarget
, TREE_OPERAND (arg0
, 0)))
8295 op0
= expand_expr (TREE_OPERAND (arg0
, 0), subtarget
, VOIDmode
, 0);
8298 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
8299 size_int (bitnum
), target
, 1);
8301 if (GET_MODE (op0
) != mode
)
8302 op0
= convert_to_mode (mode
, op0
, 1);
8304 if (bitnum
!= TYPE_PRECISION (type
) - 1)
8305 op0
= expand_and (op0
, const1_rtx
, target
);
8307 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
8308 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, target
, 0,
8314 /* Now see if we are likely to be able to do this. Return if not. */
8315 if (! can_compare_p (operand_mode
))
8317 icode
= setcc_gen_code
[(int) code
];
8318 if (icode
== CODE_FOR_nothing
8319 || (only_cheap
&& insn_operand_mode
[(int) icode
][0] != mode
))
8321 /* We can only do this if it is one of the special cases that
8322 can be handled without an scc insn. */
8323 if ((code
== LT
&& integer_zerop (arg1
))
8324 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
8326 else if (BRANCH_COST
>= 0
8327 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
8328 && TREE_CODE (type
) != REAL_TYPE
8329 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
8330 != CODE_FOR_nothing
)
8331 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
8332 != CODE_FOR_nothing
)))
8338 preexpand_calls (exp
);
8339 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
8340 || GET_MODE (subtarget
) != operand_mode
8341 || ! safe_from_p (subtarget
, arg1
))
8344 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
8345 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
8348 target
= gen_reg_rtx (mode
);
8350 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
8351 because, if the emit_store_flag does anything it will succeed and
8352 OP0 and OP1 will not be used subsequently. */
8354 result
= emit_store_flag (target
, code
,
8355 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
8356 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
8357 operand_mode
, unsignedp
, 1);
8362 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
8363 result
, 0, OPTAB_LIB_WIDEN
);
8367 /* If this failed, we have to do this with set/compare/jump/set code. */
8368 if (target
== 0 || GET_CODE (target
) != REG
8369 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
8370 target
= gen_reg_rtx (GET_MODE (target
));
8372 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
8373 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
8374 operand_mode
, NULL_RTX
, 0);
8375 if (GET_CODE (result
) == CONST_INT
)
8376 return (((result
== const0_rtx
&& ! invert
)
8377 || (result
!= const0_rtx
&& invert
))
8378 ? const0_rtx
: const1_rtx
);
8380 label
= gen_label_rtx ();
8381 if (bcc_gen_fctn
[(int) code
] == 0)
8384 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
8385 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  /* The dispatch table itself is never modified.  */
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */