1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls
= 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust
;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop
;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call
;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
105 int target_temp_slot_level
;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
110 static rtx saveregs_value
;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value
;
115 /* This structure is used by move_by_pieces to describe the move to
118 struct move_by_pieces
128 int explicit_inc_from
;
135 /* This structure is used by clear_by_pieces to describe the clear to
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size
;
155 extern int stack_depth
;
156 extern int max_stack_depth
;
157 extern struct obstack permanent_obstack
;
158 extern rtx arg_pointer_save_area
;
160 static rtx enqueue_insn
PROTO((rtx
, rtx
));
161 static int queued_subexp_p
PROTO((rtx
));
162 static void init_queue
PROTO((void));
163 static void move_by_pieces
PROTO((rtx
, rtx
, int, int));
164 static int move_by_pieces_ninsns
PROTO((unsigned int, int));
165 static void move_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
166 struct move_by_pieces
*));
167 static void clear_by_pieces
PROTO((rtx
, int, int));
168 static void clear_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
169 struct clear_by_pieces
*));
170 static int is_zeros_p
PROTO((tree
));
171 static int mostly_zeros_p
PROTO((tree
));
172 static void store_constructor
PROTO((tree
, rtx
, int));
173 static rtx store_field
PROTO((rtx
, int, int, enum machine_mode
, tree
,
174 enum machine_mode
, int, int, int));
175 static int get_inner_unaligned_p
PROTO((tree
));
176 static tree save_noncopied_parts
PROTO((tree
, tree
));
177 static tree init_noncopied_parts
PROTO((tree
, tree
));
178 static int safe_from_p
PROTO((rtx
, tree
));
179 static int fixed_type_p
PROTO((tree
));
180 static rtx var_rtx
PROTO((tree
));
181 static int get_pointer_alignment
PROTO((tree
, unsigned));
182 static tree string_constant
PROTO((tree
, tree
*));
183 static tree c_strlen
PROTO((tree
));
184 static rtx expand_builtin
PROTO((tree
, rtx
, rtx
,
185 enum machine_mode
, int));
186 static int apply_args_size
PROTO((void));
187 static int apply_result_size
PROTO((void));
188 static rtx result_vector
PROTO((int, rtx
));
189 static rtx expand_builtin_apply_args
PROTO((void));
190 static rtx expand_builtin_apply
PROTO((rtx
, rtx
, rtx
));
191 static void expand_builtin_return
PROTO((rtx
));
192 static rtx expand_increment
PROTO((tree
, int, int));
193 void bc_expand_increment
PROTO((struct increment_operator
*, tree
));
194 rtx bc_allocate_local
PROTO((int, int));
195 void bc_store_memory
PROTO((tree
, tree
));
196 tree bc_expand_component_address
PROTO((tree
));
197 tree bc_expand_address
PROTO((tree
));
198 void bc_expand_constructor
PROTO((tree
));
199 void bc_adjust_stack
PROTO((int));
200 tree bc_canonicalize_array_ref
PROTO((tree
));
201 void bc_load_memory
PROTO((tree
, tree
));
202 void bc_load_externaddr
PROTO((rtx
));
203 void bc_load_externaddr_id
PROTO((tree
, int));
204 void bc_load_localaddr
PROTO((rtx
));
205 void bc_load_parmaddr
PROTO((rtx
));
206 static void preexpand_calls
PROTO((tree
));
207 static void do_jump_by_parts_greater
PROTO((tree
, int, rtx
, rtx
));
208 void do_jump_by_parts_greater_rtx
PROTO((enum machine_mode
, int, rtx
, rtx
, rtx
, rtx
));
209 static void do_jump_by_parts_equality
PROTO((tree
, rtx
, rtx
));
210 static void do_jump_by_parts_equality_rtx
PROTO((rtx
, rtx
, rtx
));
211 static void do_jump_for_compare
PROTO((rtx
, rtx
, rtx
));
212 static rtx compare
PROTO((tree
, enum rtx_code
, enum rtx_code
));
213 static rtx do_store_flag
PROTO((tree
, rtx
, enum machine_mode
, int));
214 static tree defer_cleanups_to
PROTO((tree
));
215 extern tree truthvalue_conversion
PROTO((tree
));
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
221 static char direct_load
[NUM_MACHINE_MODES
];
222 static char direct_store
[NUM_MACHINE_MODES
];
224 /* MOVE_RATIO is the number of move instructions that is better than
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
231 /* A value of around 6 would minimize code size; infinity would minimize
233 #define MOVE_RATIO 15
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map
[MAX_MACHINE_MODE
];
259 enum bytecode_opcode mode_to_load_map
[MAX_MACHINE_MODE
];
260 enum bytecode_opcode mode_to_store_map
[MAX_MACHINE_MODE
];
262 /* Initialize maps used to convert modes to const, load, and store
266 bc_init_mode_to_opcode_maps ()
270 for (mode
= 0; mode
< (int) MAX_MACHINE_MODE
; mode
++)
271 mode_to_const_map
[mode
] =
272 mode_to_load_map
[mode
] =
273 mode_to_store_map
[mode
] = neverneverland
;
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
280 #include "modemap.def"
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
291 enum machine_mode mode
;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem
= gen_rtx (MEM
, VOIDmode
, stack_pointer_rtx
);
296 rtx mem1
= gen_rtx (MEM
, VOIDmode
, frame_pointer_rtx
);
299 insn
= emit_insn (gen_rtx (SET
, 0, 0));
300 pat
= PATTERN (insn
);
302 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
303 mode
= (enum machine_mode
) ((int) mode
+ 1))
309 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
310 PUT_MODE (mem
, mode
);
311 PUT_MODE (mem1
, mode
);
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
316 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
317 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
318 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
321 if (! HARD_REGNO_MODE_OK (regno
, mode
))
324 reg
= gen_rtx (REG
, mode
, regno
);
327 SET_DEST (pat
) = reg
;
328 if (recog (pat
, insn
, &num_clobbers
) >= 0)
329 direct_load
[(int) mode
] = 1;
331 SET_SRC (pat
) = mem1
;
332 SET_DEST (pat
) = reg
;
333 if (recog (pat
, insn
, &num_clobbers
) >= 0)
334 direct_load
[(int) mode
] = 1;
337 SET_DEST (pat
) = mem
;
338 if (recog (pat
, insn
, &num_clobbers
) >= 0)
339 direct_store
[(int) mode
] = 1;
342 SET_DEST (pat
) = mem1
;
343 if (recog (pat
, insn
, &num_clobbers
) >= 0)
344 direct_store
[(int) mode
] = 1;
351 /* This is run at the start of compiling a function. */
358 pending_stack_adjust
= 0;
359 inhibit_defer_pop
= 0;
360 cleanups_this_call
= 0;
362 apply_args_value
= 0;
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
373 /* Instead of saving the postincrement queue, empty it. */
376 p
->pending_stack_adjust
= pending_stack_adjust
;
377 p
->inhibit_defer_pop
= inhibit_defer_pop
;
378 p
->cleanups_this_call
= cleanups_this_call
;
379 p
->saveregs_value
= saveregs_value
;
380 p
->apply_args_value
= apply_args_value
;
381 p
->forced_labels
= forced_labels
;
383 pending_stack_adjust
= 0;
384 inhibit_defer_pop
= 0;
385 cleanups_this_call
= 0;
387 apply_args_value
= 0;
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
395 restore_expr_status (p
)
398 pending_stack_adjust
= p
->pending_stack_adjust
;
399 inhibit_defer_pop
= p
->inhibit_defer_pop
;
400 cleanups_this_call
= p
->cleanups_this_call
;
401 saveregs_value
= p
->saveregs_value
;
402 apply_args_value
= p
->apply_args_value
;
403 forced_labels
= p
->forced_labels
;
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
409 static rtx pending_chain
;
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
419 enqueue_insn (var
, body
)
422 pending_chain
= gen_rtx (QUEUED
, GET_MODE (var
),
423 var
, NULL_RTX
, NULL_RTX
, body
, pending_chain
);
424 return pending_chain
;
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
443 protect_from_queue (x
, modify
)
447 register RTX_CODE code
= GET_CODE (x
);
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain
== 0)
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
462 if (code
== MEM
&& GET_MODE (x
) != BLKmode
463 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
465 register rtx y
= XEXP (x
, 0);
466 register rtx
new = gen_rtx (MEM
, GET_MODE (x
), QUEUED_VAR (y
));
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
474 register rtx temp
= gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp
, new),
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
485 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
486 if (tem
!= XEXP (x
, 0))
492 else if (code
== PLUS
|| code
== MULT
)
494 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
495 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
496 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x
) == 0)
507 return QUEUED_VAR (x
);
508 /* If the increment has happened and a pre-increment copy exists,
510 if (QUEUED_COPY (x
) != 0)
511 return QUEUED_COPY (x
);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
517 return QUEUED_COPY (x
);
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
529 register enum rtx_code code
= GET_CODE (x
);
535 return queued_subexp_p (XEXP (x
, 0));
539 return queued_subexp_p (XEXP (x
, 0))
540 || queued_subexp_p (XEXP (x
, 1));
545 /* Perform all the pending incrementations. */
551 while (p
= pending_chain
)
553 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
554 pending_chain
= QUEUED_NEXT (p
);
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
571 convert_move (to
, from
, unsignedp
)
572 register rtx to
, from
;
575 enum machine_mode to_mode
= GET_MODE (to
);
576 enum machine_mode from_mode
= GET_MODE (from
);
577 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
578 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
585 to
= protect_from_queue (to
, 1);
586 from
= protect_from_queue (from
, 0);
588 if (to_real
!= from_real
)
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
595 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
597 >= GET_MODE_SIZE (to_mode
))
598 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
599 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
601 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
604 if (to_mode
== from_mode
605 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
607 emit_move_insn (to
, from
);
615 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
617 /* Try converting directly if the insn is supported. */
618 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
621 emit_unop_insn (code
, to
, from
, UNKNOWN
);
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
629 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
636 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
643 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
650 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
657 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
665 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
672 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
679 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
686 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
693 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
701 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
708 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
715 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
722 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
730 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
737 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
744 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
751 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
758 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
770 libcall
= extendsfdf2_libfunc
;
774 libcall
= extendsfxf2_libfunc
;
778 libcall
= extendsftf2_libfunc
;
787 libcall
= truncdfsf2_libfunc
;
791 libcall
= extenddfxf2_libfunc
;
795 libcall
= extenddftf2_libfunc
;
804 libcall
= truncxfsf2_libfunc
;
808 libcall
= truncxfdf2_libfunc
;
817 libcall
= trunctfsf2_libfunc
;
821 libcall
= trunctfdf2_libfunc
;
827 if (libcall
== (rtx
) 0)
828 /* This conversion is not implemented yet. */
831 value
= emit_library_call_value (libcall
, NULL_RTX
, 1, to_mode
,
833 emit_move_insn (to
, value
);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
841 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
848 enum machine_mode lowpart_mode
;
849 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
851 /* Try converting directly if the insn is supported. */
852 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
860 from
= force_reg (from_mode
, from
);
861 emit_unop_insn (code
, to
, from
, equiv_code
);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
866 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
867 != CODE_FOR_nothing
))
869 if (GET_CODE (to
) == REG
)
870 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, to
));
871 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
872 emit_unop_insn (code
, to
,
873 gen_lowpart (word_mode
, to
), equiv_code
);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to
, from
))
884 from
= force_reg (from_mode
, from
);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
888 lowpart_mode
= word_mode
;
890 lowpart_mode
= from_mode
;
892 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
894 lowpart
= gen_lowpart (lowpart_mode
, to
);
895 emit_move_insn (lowpart
, lowfrom
);
897 /* Compute the value to put in each remaining word. */
899 fill_value
= const0_rtx
;
904 && insn_operand_mode
[(int) CODE_FOR_slt
][0] == word_mode
905 && STORE_FLAG_VALUE
== -1)
907 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
909 fill_value
= gen_reg_rtx (word_mode
);
910 emit_insn (gen_slt (fill_value
));
916 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
917 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
919 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
923 /* Fill the remaining words. */
924 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
926 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
927 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
932 if (fill_value
!= subword
)
933 emit_move_insn (subword
, fill_value
);
936 insns
= get_insns ();
939 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
940 gen_rtx (equiv_code
, to_mode
, copy_rtx (from
)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
948 if (!((GET_CODE (from
) == MEM
949 && ! MEM_VOLATILE_P (from
)
950 && direct_load
[(int) to_mode
]
951 && ! mode_dependent_address_p (XEXP (from
, 0)))
952 || GET_CODE (from
) == REG
953 || GET_CODE (from
) == SUBREG
))
954 from
= force_reg (from_mode
, from
);
955 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode
== PSImode
)
962 if (from_mode
!= SImode
)
963 from
= convert_to_mode (SImode
, from
, unsignedp
);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2
)
968 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode
== PSImode
)
977 if (to_mode
!= SImode
)
979 from
= convert_to_mode (SImode
, from
, unsignedp
);
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2
)
987 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
990 #endif /* HAVE_extendpsisi2 */
995 if (to_mode
== PDImode
)
997 if (from_mode
!= DImode
)
998 from
= convert_to_mode (DImode
, from
, unsignedp
);
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2
)
1003 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1006 #endif /* HAVE_truncdipdi2 */
1010 if (from_mode
== PDImode
)
1012 if (to_mode
!= DImode
)
1014 from
= convert_to_mode (DImode
, from
, unsignedp
);
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2
)
1022 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1025 #endif /* HAVE_extendpdidi2 */
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1036 GET_MODE_BITSIZE (from_mode
)))
1038 if (!((GET_CODE (from
) == MEM
1039 && ! MEM_VOLATILE_P (from
)
1040 && direct_load
[(int) to_mode
]
1041 && ! mode_dependent_address_p (XEXP (from
, 0)))
1042 || GET_CODE (from
) == REG
1043 || GET_CODE (from
) == SUBREG
))
1044 from
= force_reg (from_mode
, from
);
1045 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1047 from
= copy_to_reg (from
);
1048 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1055 /* Convert directly if that works. */
1056 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1057 != CODE_FOR_nothing
)
1059 emit_unop_insn (code
, to
, from
, equiv_code
);
1064 enum machine_mode intermediate
;
1066 /* Search for a mode to convert via. */
1067 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1068 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1069 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1070 != CODE_FOR_nothing
)
1071 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1072 && TRULY_NOOP_TRUNCATION (to_mode
, intermediate
)))
1073 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1074 != CODE_FOR_nothing
))
1076 convert_move (to
, convert_to_mode (intermediate
, from
,
1077 unsignedp
), unsignedp
);
1081 /* No suitable intermediate mode. */
1086 /* Support special truncate insns for certain modes. */
1088 if (from_mode
== DImode
&& to_mode
== SImode
)
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2
)
1093 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1097 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1101 if (from_mode
== DImode
&& to_mode
== HImode
)
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2
)
1106 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1110 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1114 if (from_mode
== DImode
&& to_mode
== QImode
)
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2
)
1119 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1123 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1127 if (from_mode
== SImode
&& to_mode
== HImode
)
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2
)
1132 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1136 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1140 if (from_mode
== SImode
&& to_mode
== QImode
)
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2
)
1145 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1149 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1153 if (from_mode
== HImode
&& to_mode
== QImode
)
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2
)
1158 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1162 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1166 if (from_mode
== TImode
&& to_mode
== DImode
)
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2
)
1171 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1175 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1179 if (from_mode
== TImode
&& to_mode
== SImode
)
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2
)
1184 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1188 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1192 if (from_mode
== TImode
&& to_mode
== HImode
)
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2
)
1197 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1201 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1205 if (from_mode
== TImode
&& to_mode
== QImode
)
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2
)
1210 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1214 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1223 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1224 emit_move_insn (to
, temp
);
1228 /* Mode combination is not recognized. */
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1243 convert_to_mode (mode
, x
, unsignedp
)
1244 enum machine_mode mode
;
1248 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1265 convert_modes (mode
, oldmode
, x
, unsignedp
)
1266 enum machine_mode mode
, oldmode
;
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1275 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1278 x
= gen_lowpart (mode
, x
);
1280 if (GET_MODE (x
) != VOIDmode
)
1281 oldmode
= GET_MODE (x
);
1283 if (mode
== oldmode
)
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1292 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1293 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1296 HOST_WIDE_INT val
= INTVAL (x
);
1298 if (oldmode
!= VOIDmode
1299 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1301 int width
= GET_MODE_BITSIZE (oldmode
);
1303 /* We need to zero extend VAL. */
1304 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1307 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1315 if ((GET_CODE (x
) == CONST_INT
1316 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1317 || (GET_MODE_CLASS (mode
) == MODE_INT
1318 && GET_MODE_CLASS (oldmode
) == MODE_INT
1319 && (GET_CODE (x
) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1321 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1322 && direct_load
[(int) mode
])
1323 || (GET_CODE (x
) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1325 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1331 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1333 HOST_WIDE_INT val
= INTVAL (x
);
1334 int width
= GET_MODE_BITSIZE (oldmode
);
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1340 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1341 val
|= (HOST_WIDE_INT
) (-1) << width
;
1343 return GEN_INT (val
);
1346 return gen_lowpart (mode
, x
);
1349 temp
= gen_reg_rtx (mode
);
1350 convert_move (temp
, x
, unsignedp
);
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1361 move_by_pieces (to
, from
, len
, align
)
1365 struct move_by_pieces data
;
1366 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1367 int max_size
= MOVE_MAX
+ 1;
1370 data
.to_addr
= to_addr
;
1371 data
.from_addr
= from_addr
;
1375 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1376 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1378 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1379 || GET_CODE (from_addr
) == POST_INC
1380 || GET_CODE (from_addr
) == POST_DEC
);
1382 data
.explicit_inc_from
= 0;
1383 data
.explicit_inc_to
= 0;
1385 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1386 if (data
.reverse
) data
.offset
= len
;
1389 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1390 data
.from_struct
= MEM_IN_STRUCT_P (from
);
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data
.autinc_from
&& data
.autinc_to
)
1396 && move_by_pieces_ninsns (len
, align
) > 2)
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data
.reverse
&& ! data
.autinc_from
)
1401 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1402 data
.autinc_from
= 1;
1403 data
.explicit_inc_from
= -1;
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data
.autinc_from
)
1409 data
.from_addr
= copy_addr_to_reg (from_addr
);
1410 data
.autinc_from
= 1;
1411 data
.explicit_inc_from
= 1;
1414 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1415 data
.from_addr
= copy_addr_to_reg (from_addr
);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data
.reverse
&& ! data
.autinc_to
)
1419 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1421 data
.explicit_inc_to
= -1;
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data
.reverse
&& ! data
.autinc_to
)
1427 data
.to_addr
= copy_addr_to_reg (to_addr
);
1429 data
.explicit_inc_to
= 1;
1432 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1433 data
.to_addr
= copy_addr_to_reg (to_addr
);
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1443 while (max_size
> 1)
1445 enum machine_mode mode
= VOIDmode
, tmode
;
1446 enum insn_code icode
;
1448 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1449 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1450 if (GET_MODE_SIZE (tmode
) < max_size
)
1453 if (mode
== VOIDmode
)
1456 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1457 if (icode
!= CODE_FOR_nothing
1458 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1459 GET_MODE_SIZE (mode
)))
1460 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1462 max_size
= GET_MODE_SIZE (mode
);
1465 /* The code above should have handled everything. */
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
1474 move_by_pieces_ninsns (l
, align
)
1478 register int n_insns
= 0;
1479 int max_size
= MOVE_MAX
+ 1;
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1485 while (max_size
> 1)
1487 enum machine_mode mode
= VOIDmode
, tmode
;
1488 enum insn_code icode
;
1490 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1491 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1492 if (GET_MODE_SIZE (tmode
) < max_size
)
1495 if (mode
== VOIDmode
)
1498 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1499 if (icode
!= CODE_FOR_nothing
1500 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1501 GET_MODE_SIZE (mode
)))
1502 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1504 max_size
= GET_MODE_SIZE (mode
);
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1515 move_by_pieces_1 (genfun
, mode
, data
)
1517 enum machine_mode mode
;
1518 struct move_by_pieces
*data
;
1520 register int size
= GET_MODE_SIZE (mode
);
1521 register rtx to1
, from1
;
1523 while (data
->len
>= size
)
1525 if (data
->reverse
) data
->offset
-= size
;
1527 to1
= (data
->autinc_to
1528 ? gen_rtx (MEM
, mode
, data
->to_addr
)
1529 : change_address (data
->to
, mode
,
1530 plus_constant (data
->to_addr
, data
->offset
)));
1531 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
1534 ? gen_rtx (MEM
, mode
, data
->from_addr
)
1535 : change_address (data
->from
, mode
,
1536 plus_constant (data
->from_addr
, data
->offset
)));
1537 MEM_IN_STRUCT_P (from1
) = data
->from_struct
;
1539 #ifdef HAVE_PRE_DECREMENT
1540 if (data
->explicit_inc_to
< 0)
1541 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1542 if (data
->explicit_inc_from
< 0)
1543 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1546 emit_insn ((*genfun
) (to1
, from1
));
1547 #ifdef HAVE_POST_INCREMENT
1548 if (data
->explicit_inc_to
> 0)
1549 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1550 if (data
->explicit_inc_from
> 0)
1551 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1554 if (! data
->reverse
) data
->offset
+= size
;
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
1571 emit_block_move (x
, y
, size
, align
)
1576 if (GET_MODE (x
) != BLKmode
)
1579 if (GET_MODE (y
) != BLKmode
)
1582 x
= protect_from_queue (x
, 1);
1583 y
= protect_from_queue (y
, 0);
1584 size
= protect_from_queue (size
, 0);
1586 if (GET_CODE (x
) != MEM
)
1588 if (GET_CODE (y
) != MEM
)
1593 if (GET_CODE (size
) == CONST_INT
1594 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
1595 move_by_pieces (x
, y
, INTVAL (size
), align
);
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage. */
1602 rtx opalign
= GEN_INT (align
);
1603 enum machine_mode mode
;
1605 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1606 mode
= GET_MODE_WIDER_MODE (mode
))
1608 enum insn_code code
= movstr_optab
[(int) mode
];
1610 if (code
!= CODE_FOR_nothing
1611 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
1613 returned by the macro, it will definitely be less than the
1614 actual mode mask. */
1615 && ((GET_CODE (size
) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1617 <= GET_MODE_MASK (mode
)))
1618 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1619 && (insn_operand_predicate
[(int) code
][0] == 0
1620 || (*insn_operand_predicate
[(int) code
][0]) (x
, BLKmode
))
1621 && (insn_operand_predicate
[(int) code
][1] == 0
1622 || (*insn_operand_predicate
[(int) code
][1]) (y
, BLKmode
))
1623 && (insn_operand_predicate
[(int) code
][3] == 0
1624 || (*insn_operand_predicate
[(int) code
][3]) (opalign
,
1628 rtx last
= get_last_insn ();
1631 op2
= convert_to_mode (mode
, size
, 1);
1632 if (insn_operand_predicate
[(int) code
][2] != 0
1633 && ! (*insn_operand_predicate
[(int) code
][2]) (op2
, mode
))
1634 op2
= copy_to_mode_reg (mode
, op2
);
1636 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1643 delete_insns_since (last
);
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 emit_library_call (memcpy_libfunc
, 0,
1649 VOIDmode
, 3, XEXP (x
, 0), Pmode
,
1651 convert_to_mode (TYPE_MODE (sizetype
), size
,
1652 TREE_UNSIGNED (sizetype
)),
1653 TYPE_MODE (sizetype
));
1655 emit_library_call (bcopy_libfunc
, 0,
1656 VOIDmode
, 3, XEXP (y
, 0), Pmode
,
1658 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1659 TREE_UNSIGNED (integer_type_node
)),
1660 TYPE_MODE (integer_type_node
));
1665 /* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
1669 move_block_to_reg (regno
, x
, nregs
, mode
)
1673 enum machine_mode mode
;
1681 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1682 x
= validize_mem (force_const_mem (mode
, x
));
1684 /* See if the machine can do this with a load multiple insn. */
1685 #ifdef HAVE_load_multiple
1686 if (HAVE_load_multiple
)
1688 last
= get_last_insn ();
1689 pat
= gen_load_multiple (gen_rtx (REG
, word_mode
, regno
), x
,
1697 delete_insns_since (last
);
1701 for (i
= 0; i
< nregs
; i
++)
1702 emit_move_insn (gen_rtx (REG
, word_mode
, regno
+ i
),
1703 operand_subword_force (x
, i
, mode
));
1706 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
1712 move_block_from_reg (regno
, x
, nregs
, size
)
1721 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1722 to the left before storing to memory. */
1723 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1725 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1731 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1732 gen_rtx (REG
, word_mode
, regno
),
1733 build_int_2 ((UNITS_PER_WORD
- size
)
1734 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1735 emit_move_insn (tem
, shift
);
1739 /* See if the machine can do this with a store multiple insn. */
1740 #ifdef HAVE_store_multiple
1741 if (HAVE_store_multiple
)
1743 last
= get_last_insn ();
1744 pat
= gen_store_multiple (x
, gen_rtx (REG
, word_mode
, regno
),
1752 delete_insns_since (last
);
1756 for (i
= 0; i
< nregs
; i
++)
1758 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1763 emit_move_insn (tem
, gen_rtx (REG
, word_mode
, regno
+ i
));
1767 /* Emit code to move a block Y to a block X, where X is non-consecutive
1768 registers represented by a PARALLEL. */
1771 emit_group_load (x
, y
)
1774 rtx target_reg
, source
;
1777 if (GET_CODE (x
) != PARALLEL
)
1780 /* Check for a NULL entry, used to indicate that the parameter goes
1781 both on the stack and in registers. */
1782 if (XEXP (XVECEXP (x
, 0, 0), 0))
1787 for (; i
< XVECLEN (x
, 0); i
++)
1789 rtx element
= XVECEXP (x
, 0, i
);
1791 target_reg
= XEXP (element
, 0);
1793 if (GET_CODE (y
) == MEM
)
1794 source
= change_address (y
, GET_MODE (target_reg
),
1795 plus_constant (XEXP (y
, 0),
1796 INTVAL (XEXP (element
, 1))));
1797 else if (XEXP (element
, 1) == const0_rtx
)
1799 if (GET_MODE (target_reg
) == GET_MODE (y
))
1801 /* Allow for the target_reg to be smaller than the input register
1802 to allow for AIX with 4 DF arguments after a single SI arg. The
1803 last DF argument will only load 1 word into the integer registers,
1804 but load a DF value into the float registers. */
1805 else if (GET_MODE_SIZE (GET_MODE (target_reg
))
1806 <= GET_MODE_SIZE (GET_MODE (y
)))
1807 source
= gen_rtx (SUBREG
, GET_MODE (target_reg
), y
, 0);
1814 emit_move_insn (target_reg
, source
);
1818 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1819 registers represented by a PARALLEL. */
1822 emit_group_store (x
, y
)
1825 rtx source_reg
, target
;
1828 if (GET_CODE (y
) != PARALLEL
)
1831 /* Check for a NULL entry, used to indicate that the parameter goes
1832 both on the stack and in registers. */
1833 if (XEXP (XVECEXP (y
, 0, 0), 0))
1838 for (; i
< XVECLEN (y
, 0); i
++)
1840 rtx element
= XVECEXP (y
, 0, i
);
1842 source_reg
= XEXP (element
, 0);
1844 if (GET_CODE (x
) == MEM
)
1845 target
= change_address (x
, GET_MODE (source_reg
),
1846 plus_constant (XEXP (x
, 0),
1847 INTVAL (XEXP (element
, 1))));
1848 else if (XEXP (element
, 1) == const0_rtx
)
1851 if (GET_MODE (target
) != GET_MODE (source_reg
))
1852 target
= gen_lowpart (GET_MODE (source_reg
), target
);
1857 emit_move_insn (target
, source_reg
);
1861 /* Add a USE expression for REG to the (possibly empty) list pointed
1862 to by CALL_FUSAGE. REG must denote a hard register. */
1865 use_reg (call_fusage
, reg
)
1866 rtx
*call_fusage
, reg
;
1868 if (GET_CODE (reg
) != REG
1869 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
1873 = gen_rtx (EXPR_LIST
, VOIDmode
,
1874 gen_rtx (USE
, VOIDmode
, reg
), *call_fusage
);
1877 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1878 starting at REGNO. All of these registers must be hard registers. */
1881 use_regs (call_fusage
, regno
, nregs
)
1888 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
1891 for (i
= 0; i
< nregs
; i
++)
1892 use_reg (call_fusage
, gen_rtx (REG
, reg_raw_mode
[regno
+ i
], regno
+ i
));
1895 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1896 PARALLEL REGS. This is for calls that pass values in multiple
1897 non-contiguous locations. The Irix 6 ABI has examples of this. */
1900 use_group_regs (call_fusage
, regs
)
1906 /* Check for a NULL entry, used to indicate that the parameter goes
1907 both on the stack and in registers. */
1908 if (XEXP (XVECEXP (regs
, 0, 0), 0))
1913 for (; i
< XVECLEN (regs
, 0); i
++)
1914 use_reg (call_fusage
, XEXP (XVECEXP (regs
, 0, i
), 0));
1917 /* Generate several move instructions to clear LEN bytes of block TO.
1918 (A MEM rtx with BLKmode). The caller must pass TO through
1919 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1923 clear_by_pieces (to
, len
, align
)
1927 struct clear_by_pieces data
;
1928 rtx to_addr
= XEXP (to
, 0);
1929 int max_size
= MOVE_MAX
+ 1;
1932 data
.to_addr
= to_addr
;
1935 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1936 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1938 data
.explicit_inc_to
= 0;
1940 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1941 if (data
.reverse
) data
.offset
= len
;
1944 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1946 /* If copying requires more than two move insns,
1947 copy addresses to registers (to make displacements shorter)
1948 and use post-increment if available. */
1950 && move_by_pieces_ninsns (len
, align
) > 2)
1952 #ifdef HAVE_PRE_DECREMENT
1953 if (data
.reverse
&& ! data
.autinc_to
)
1955 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1957 data
.explicit_inc_to
= -1;
1960 #ifdef HAVE_POST_INCREMENT
1961 if (! data
.reverse
&& ! data
.autinc_to
)
1963 data
.to_addr
= copy_addr_to_reg (to_addr
);
1965 data
.explicit_inc_to
= 1;
1968 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1969 data
.to_addr
= copy_addr_to_reg (to_addr
);
1972 if (! SLOW_UNALIGNED_ACCESS
1973 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1976 /* First move what we can in the largest integer mode, then go to
1977 successively smaller modes. */
1979 while (max_size
> 1)
1981 enum machine_mode mode
= VOIDmode
, tmode
;
1982 enum insn_code icode
;
1984 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1985 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1986 if (GET_MODE_SIZE (tmode
) < max_size
)
1989 if (mode
== VOIDmode
)
1992 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1993 if (icode
!= CODE_FOR_nothing
1994 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1995 GET_MODE_SIZE (mode
)))
1996 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1998 max_size
= GET_MODE_SIZE (mode
);
2001 /* The code above should have handled everything. */
2006 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2007 with move instructions for mode MODE. GENFUN is the gen_... function
2008 to make a move insn for that mode. DATA has all the other info. */
2011 clear_by_pieces_1 (genfun
, mode
, data
)
2013 enum machine_mode mode
;
2014 struct clear_by_pieces
*data
;
2016 register int size
= GET_MODE_SIZE (mode
);
2019 while (data
->len
>= size
)
2021 if (data
->reverse
) data
->offset
-= size
;
2023 to1
= (data
->autinc_to
2024 ? gen_rtx (MEM
, mode
, data
->to_addr
)
2025 : change_address (data
->to
, mode
,
2026 plus_constant (data
->to_addr
, data
->offset
)));
2027 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
2029 #ifdef HAVE_PRE_DECREMENT
2030 if (data
->explicit_inc_to
< 0)
2031 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2034 emit_insn ((*genfun
) (to1
, const0_rtx
));
2035 #ifdef HAVE_POST_INCREMENT
2036 if (data
->explicit_inc_to
> 0)
2037 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2040 if (! data
->reverse
) data
->offset
+= size
;
2046 /* Write zeros through the storage of OBJECT.
2047 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2048 the maximum alignment we can is has, measured in bytes. */
2051 clear_storage (object
, size
, align
)
2056 if (GET_MODE (object
) == BLKmode
)
2058 object
= protect_from_queue (object
, 1);
2059 size
= protect_from_queue (size
, 0);
2061 if (GET_CODE (size
) == CONST_INT
2062 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
2063 clear_by_pieces (object
, INTVAL (size
), align
);
2067 /* Try the most limited insn first, because there's no point
2068 including more than one in the machine description unless
2069 the more limited one has some advantage. */
2071 rtx opalign
= GEN_INT (align
);
2072 enum machine_mode mode
;
2074 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2075 mode
= GET_MODE_WIDER_MODE (mode
))
2077 enum insn_code code
= clrstr_optab
[(int) mode
];
2079 if (code
!= CODE_FOR_nothing
2080 /* We don't need MODE to be narrower than
2081 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2082 the mode mask, as it is returned by the macro, it will
2083 definitely be less than the actual mode mask. */
2084 && ((GET_CODE (size
) == CONST_INT
2085 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2086 <= GET_MODE_MASK (mode
)))
2087 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2088 && (insn_operand_predicate
[(int) code
][0] == 0
2089 || (*insn_operand_predicate
[(int) code
][0]) (object
,
2091 && (insn_operand_predicate
[(int) code
][2] == 0
2092 || (*insn_operand_predicate
[(int) code
][2]) (opalign
,
2096 rtx last
= get_last_insn ();
2099 op1
= convert_to_mode (mode
, size
, 1);
2100 if (insn_operand_predicate
[(int) code
][1] != 0
2101 && ! (*insn_operand_predicate
[(int) code
][1]) (op1
,
2103 op1
= copy_to_mode_reg (mode
, op1
);
2105 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2112 delete_insns_since (last
);
2117 #ifdef TARGET_MEM_FUNCTIONS
2118 emit_library_call (memset_libfunc
, 0,
2120 XEXP (object
, 0), Pmode
,
2121 const0_rtx
, TYPE_MODE (integer_type_node
),
2122 convert_to_mode (TYPE_MODE (sizetype
),
2123 size
, TREE_UNSIGNED (sizetype
)),
2124 TYPE_MODE (sizetype
));
2126 emit_library_call (bzero_libfunc
, 0,
2128 XEXP (object
, 0), Pmode
,
2129 convert_to_mode (TYPE_MODE (integer_type_node
),
2131 TREE_UNSIGNED (integer_type_node
)),
2132 TYPE_MODE (integer_type_node
));
2137 emit_move_insn (object
, const0_rtx
);
2140 /* Generate code to copy Y into X.
2141 Both Y and X must have the same mode, except that
2142 Y can be a constant with VOIDmode.
2143 This mode cannot be BLKmode; use emit_block_move for that.
2145 Return the last instruction emitted. */
2148 emit_move_insn (x
, y
)
2151 enum machine_mode mode
= GET_MODE (x
);
2153 x
= protect_from_queue (x
, 1);
2154 y
= protect_from_queue (y
, 0);
2156 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2159 if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2160 y
= force_const_mem (mode
, y
);
2162 /* If X or Y are memory references, verify that their addresses are valid
2164 if (GET_CODE (x
) == MEM
2165 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2166 && ! push_operand (x
, GET_MODE (x
)))
2168 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2169 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2171 if (GET_CODE (y
) == MEM
2172 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2174 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2175 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2177 if (mode
== BLKmode
)
2180 return emit_move_insn_1 (x
, y
);
2183 /* Low level part of emit_move_insn.
2184 Called just like emit_move_insn, but assumes X and Y
2185 are basically valid. */
2188 emit_move_insn_1 (x
, y
)
2191 enum machine_mode mode
= GET_MODE (x
);
2192 enum machine_mode submode
;
2193 enum mode_class
class = GET_MODE_CLASS (mode
);
2196 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2198 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2200 /* Expand complex moves by moving real part and imag part, if possible. */
2201 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2202 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2204 (class == MODE_COMPLEX_INT
2205 ? MODE_INT
: MODE_FLOAT
),
2207 && (mov_optab
->handlers
[(int) submode
].insn_code
2208 != CODE_FOR_nothing
))
2210 /* Don't split destination if it is a stack push. */
2211 int stack
= push_operand (x
, GET_MODE (x
));
2214 /* If this is a stack, push the highpart first, so it
2215 will be in the argument order.
2217 In that case, change_address is used only to convert
2218 the mode, not to change the address. */
2221 /* Note that the real part always precedes the imag part in memory
2222 regardless of machine's endianness. */
2223 #ifdef STACK_GROWS_DOWNWARD
2224 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2225 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2226 gen_imagpart (submode
, y
)));
2227 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2228 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2229 gen_realpart (submode
, y
)));
2231 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2232 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2233 gen_realpart (submode
, y
)));
2234 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2235 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2236 gen_imagpart (submode
, y
)));
2241 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2242 (gen_realpart (submode
, x
), gen_realpart (submode
, y
)));
2243 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2244 (gen_imagpart (submode
, x
), gen_imagpart (submode
, y
)));
2247 return get_last_insn ();
2250 /* This will handle any multi-word mode that lacks a move_insn pattern.
2251 However, you will get better code if you define such patterns,
2252 even if they must turn into multiple assembler instructions. */
2253 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2258 #ifdef PUSH_ROUNDING
2260 /* If X is a push on the stack, do the push now and replace
2261 X with a reference to the stack pointer. */
2262 if (push_operand (x
, GET_MODE (x
)))
2264 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2265 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2269 /* Show the output dies here. */
2271 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, x
));
2274 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2277 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2278 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2280 /* If we can't get a part of Y, put Y into memory if it is a
2281 constant. Otherwise, force it into a register. If we still
2282 can't get a part of Y, abort. */
2283 if (ypart
== 0 && CONSTANT_P (y
))
2285 y
= force_const_mem (mode
, y
);
2286 ypart
= operand_subword (y
, i
, 1, mode
);
2288 else if (ypart
== 0)
2289 ypart
= operand_subword_force (y
, i
, mode
);
2291 if (xpart
== 0 || ypart
== 0)
2294 last_insn
= emit_move_insn (xpart
, ypart
);
2303 /* Pushing data onto the stack. */
2305 /* Push a block of length SIZE (perhaps variable)
2306 and return an rtx to address the beginning of the block.
2307 Note that it is not possible for the value returned to be a QUEUED.
2308 The value may be virtual_outgoing_args_rtx.
2310 EXTRA is the number of bytes of padding to push in addition to SIZE.
2311 BELOW nonzero means this padding comes at low addresses;
2312 otherwise, the padding comes at high addresses. */
2315 push_block (size
, extra
, below
)
2321 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2322 if (CONSTANT_P (size
))
2323 anti_adjust_stack (plus_constant (size
, extra
));
2324 else if (GET_CODE (size
) == REG
&& extra
== 0)
2325 anti_adjust_stack (size
);
2328 rtx temp
= copy_to_mode_reg (Pmode
, size
);
2330 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2331 temp
, 0, OPTAB_LIB_WIDEN
);
2332 anti_adjust_stack (temp
);
2335 #ifdef STACK_GROWS_DOWNWARD
2336 temp
= virtual_outgoing_args_rtx
;
2337 if (extra
!= 0 && below
)
2338 temp
= plus_constant (temp
, extra
);
2340 if (GET_CODE (size
) == CONST_INT
)
2341 temp
= plus_constant (virtual_outgoing_args_rtx
,
2342 - INTVAL (size
) - (below
? 0 : extra
));
2343 else if (extra
!= 0 && !below
)
2344 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2345 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2347 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2348 negate_rtx (Pmode
, size
));
2351 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
2357 return gen_rtx (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
2360 /* Generate code to push X onto the stack, assuming it has mode MODE and
2362 MODE is redundant except when X is a CONST_INT (since they don't
2364 SIZE is an rtx for the size of data to be copied (in bytes),
2365 needed only if X is BLKmode.
2367 ALIGN (in bytes) is maximum alignment we can assume.
2369 If PARTIAL and REG are both nonzero, then copy that many of the first
2370 words of X into registers starting with REG, and push the rest of X.
2371 The amount of space pushed is decreased by PARTIAL words,
2372 rounded *down* to a multiple of PARM_BOUNDARY.
2373 REG must be a hard register in this case.
2374 If REG is zero but PARTIAL is not, take any all others actions for an
2375 argument partially in registers, but do not actually load any
2378 EXTRA is the amount in bytes of extra space to leave next to this arg.
2379 This is ignored if an argument block has already been allocated.
2381 On a machine that lacks real push insns, ARGS_ADDR is the address of
2382 the bottom of the argument block for this call. We use indexing off there
2383 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2384 argument block has not been preallocated.
2386 ARGS_SO_FAR is the size of args previously pushed for this call. */
2389 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2390 args_addr
, args_so_far
)
2392 enum machine_mode mode
;
2403 enum direction stack_direction
2404 #ifdef STACK_GROWS_DOWNWARD
2410 /* Decide where to pad the argument: `downward' for below,
2411 `upward' for above, or `none' for don't pad it.
2412 Default is below for small data on big-endian machines; else above. */
2413 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2415 /* If we're placing part of X into a register and part of X onto
2416 the stack, indicate that the entire register is clobbered to
2417 keep flow from thinking the unused part of the register is live. */
2418 if (partial
> 0 && reg
!= 0)
2419 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, reg
));
2421 /* Invert direction if stack is post-update. */
2422 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2423 if (where_pad
!= none
)
2424 where_pad
= (where_pad
== downward
? upward
: downward
);
2426 xinner
= x
= protect_from_queue (x
, 0);
2428 if (mode
== BLKmode
)
2430 /* Copy a block into the stack, entirely or partially. */
2433 int used
= partial
* UNITS_PER_WORD
;
2434 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2442 /* USED is now the # of bytes we need not copy to the stack
2443 because registers will take care of them. */
2446 xinner
= change_address (xinner
, BLKmode
,
2447 plus_constant (XEXP (xinner
, 0), used
));
2449 /* If the partial register-part of the arg counts in its stack size,
2450 skip the part of stack space corresponding to the registers.
2451 Otherwise, start copying to the beginning of the stack space,
2452 by setting SKIP to 0. */
2453 #ifndef REG_PARM_STACK_SPACE
2459 #ifdef PUSH_ROUNDING
2460 /* Do it with several push insns if that doesn't take lots of insns
2461 and if there is no difficulty with push insns that skip bytes
2462 on the stack for alignment purposes. */
2464 && GET_CODE (size
) == CONST_INT
2466 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
) - used
, align
)
2468 /* Here we avoid the case of a structure whose weak alignment
2469 forces many pushes of a small amount of data,
2470 and such small pushes do rounding that causes trouble. */
2471 && ((! SLOW_UNALIGNED_ACCESS
)
2472 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
2473 || PUSH_ROUNDING (align
) == align
)
2474 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
2476 /* Push padding now if padding above and stack grows down,
2477 or if padding below and stack grows up.
2478 But if space already allocated, this has already been done. */
2479 if (extra
&& args_addr
== 0
2480 && where_pad
!= none
&& where_pad
!= stack_direction
)
2481 anti_adjust_stack (GEN_INT (extra
));
2483 move_by_pieces (gen_rtx (MEM
, BLKmode
, gen_push_operand ()), xinner
,
2484 INTVAL (size
) - used
, align
);
2487 #endif /* PUSH_ROUNDING */
2489 /* Otherwise make space on the stack and copy the data
2490 to the address of that space. */
2492 /* Deduct words put into registers from the size we must copy. */
2495 if (GET_CODE (size
) == CONST_INT
)
2496 size
= GEN_INT (INTVAL (size
) - used
);
2498 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
2499 GEN_INT (used
), NULL_RTX
, 0,
2503 /* Get the address of the stack space.
2504 In this case, we do not deal with EXTRA separately.
2505 A single stack adjust will do. */
2508 temp
= push_block (size
, extra
, where_pad
== downward
);
2511 else if (GET_CODE (args_so_far
) == CONST_INT
)
2512 temp
= memory_address (BLKmode
,
2513 plus_constant (args_addr
,
2514 skip
+ INTVAL (args_so_far
)));
2516 temp
= memory_address (BLKmode
,
2517 plus_constant (gen_rtx (PLUS
, Pmode
,
2518 args_addr
, args_so_far
),
2521 /* TEMP is the address of the block. Copy the data there. */
2522 if (GET_CODE (size
) == CONST_INT
2523 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
), align
)
2526 move_by_pieces (gen_rtx (MEM
, BLKmode
, temp
), xinner
,
2527 INTVAL (size
), align
);
2530 /* Try the most limited insn first, because there's no point
2531 including more than one in the machine description unless
2532 the more limited one has some advantage. */
2533 #ifdef HAVE_movstrqi
2535 && GET_CODE (size
) == CONST_INT
2536 && ((unsigned) INTVAL (size
)
2537 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
2539 rtx pat
= gen_movstrqi (gen_rtx (MEM
, BLKmode
, temp
),
2540 xinner
, size
, GEN_INT (align
));
2548 #ifdef HAVE_movstrhi
2550 && GET_CODE (size
) == CONST_INT
2551 && ((unsigned) INTVAL (size
)
2552 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
2554 rtx pat
= gen_movstrhi (gen_rtx (MEM
, BLKmode
, temp
),
2555 xinner
, size
, GEN_INT (align
));
2563 #ifdef HAVE_movstrsi
2566 rtx pat
= gen_movstrsi (gen_rtx (MEM
, BLKmode
, temp
),
2567 xinner
, size
, GEN_INT (align
));
2575 #ifdef HAVE_movstrdi
2578 rtx pat
= gen_movstrdi (gen_rtx (MEM
, BLKmode
, temp
),
2579 xinner
, size
, GEN_INT (align
));
2588 #ifndef ACCUMULATE_OUTGOING_ARGS
2589 /* If the source is referenced relative to the stack pointer,
2590 copy it to another register to stabilize it. We do not need
2591 to do this if we know that we won't be changing sp. */
2593 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
2594 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
2595 temp
= copy_to_reg (temp
);
2598 /* Make inhibit_defer_pop nonzero around the library call
2599 to force it to pop the bcopy-arguments right away. */
2601 #ifdef TARGET_MEM_FUNCTIONS
2602 emit_library_call (memcpy_libfunc
, 0,
2603 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
2604 convert_to_mode (TYPE_MODE (sizetype
),
2605 size
, TREE_UNSIGNED (sizetype
)),
2606 TYPE_MODE (sizetype
));
2608 emit_library_call (bcopy_libfunc
, 0,
2609 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
2610 convert_to_mode (TYPE_MODE (integer_type_node
),
2612 TREE_UNSIGNED (integer_type_node
)),
2613 TYPE_MODE (integer_type_node
));
2618 else if (partial
> 0)
2620 /* Scalar partly in registers. */
2622 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
2625 /* # words of start of argument
2626 that we must make space for but need not store. */
2627 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
2628 int args_offset
= INTVAL (args_so_far
);
2631 /* Push padding now if padding above and stack grows down,
2632 or if padding below and stack grows up.
2633 But if space already allocated, this has already been done. */
2634 if (extra
&& args_addr
== 0
2635 && where_pad
!= none
&& where_pad
!= stack_direction
)
2636 anti_adjust_stack (GEN_INT (extra
));
2638 /* If we make space by pushing it, we might as well push
2639 the real data. Otherwise, we can leave OFFSET nonzero
2640 and leave the space uninitialized. */
2644 /* Now NOT_STACK gets the number of words that we don't need to
2645 allocate on the stack. */
2646 not_stack
= partial
- offset
;
2648 /* If the partial register-part of the arg counts in its stack size,
2649 skip the part of stack space corresponding to the registers.
2650 Otherwise, start copying to the beginning of the stack space,
2651 by setting SKIP to 0. */
2652 #ifndef REG_PARM_STACK_SPACE
2658 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2659 x
= validize_mem (force_const_mem (mode
, x
));
2661 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2662 SUBREGs of such registers are not allowed. */
2663 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
2664 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
2665 x
= copy_to_reg (x
);
2667 /* Loop over all the words allocated on the stack for this arg. */
2668 /* We can do it by words, because any scalar bigger than a word
2669 has a size a multiple of a word. */
2670 #ifndef PUSH_ARGS_REVERSED
2671 for (i
= not_stack
; i
< size
; i
++)
2673 for (i
= size
- 1; i
>= not_stack
; i
--)
2675 if (i
>= not_stack
+ offset
)
2676 emit_push_insn (operand_subword_force (x
, i
, mode
),
2677 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
2679 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
2680 * UNITS_PER_WORD
)));
2686 /* Push padding now if padding above and stack grows down,
2687 or if padding below and stack grows up.
2688 But if space already allocated, this has already been done. */
2689 if (extra
&& args_addr
== 0
2690 && where_pad
!= none
&& where_pad
!= stack_direction
)
2691 anti_adjust_stack (GEN_INT (extra
));
2693 #ifdef PUSH_ROUNDING
2695 addr
= gen_push_operand ();
2698 if (GET_CODE (args_so_far
) == CONST_INT
)
2700 = memory_address (mode
,
2701 plus_constant (args_addr
, INTVAL (args_so_far
)));
2703 addr
= memory_address (mode
, gen_rtx (PLUS
, Pmode
, args_addr
,
2706 emit_move_insn (gen_rtx (MEM
, mode
, addr
), x
);
2710 /* If part should go in registers, copy that part
2711 into the appropriate registers. Do this now, at the end,
2712 since mem-to-mem copies above may do function calls. */
2713 if (partial
> 0 && reg
!= 0)
2715 /* Handle calls that pass values in multiple non-contiguous locations.
2716 The Irix 6 ABI has examples of this. */
2717 if (GET_CODE (reg
) == PARALLEL
)
2718 emit_group_load (reg
, x
);
2720 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
2723 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
2724 anti_adjust_stack (GEN_INT (extra
));
2727 /* Expand an assignment that stores the value of FROM into TO.
2728 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2729 (This may contain a QUEUED rtx;
2730 if the value is constant, this rtx is a constant.)
2731 Otherwise, the returned value is NULL_RTX.
2733 SUGGEST_REG is no longer actually used.
2734 It used to mean, copy the value through a register
2735 and return that register, if that is possible.
2736 We now use WANT_VALUE to decide whether to do this. */
2739 expand_assignment (to
, from
, want_value
, suggest_reg
)
2744 register rtx to_rtx
= 0;
2747 /* Don't crash if the lhs of the assignment was erroneous. */
2749 if (TREE_CODE (to
) == ERROR_MARK
)
2751 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2752 return want_value
? result
: NULL_RTX
;
2755 if (output_bytecode
)
2757 tree dest_innermost
;
2759 bc_expand_expr (from
);
2760 bc_emit_instruction (duplicate
);
2762 dest_innermost
= bc_expand_address (to
);
2764 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2765 take care of it here. */
2767 bc_store_memory (TREE_TYPE (to
), dest_innermost
);
2771 /* Assignment of a structure component needs special treatment
2772 if the structure component's rtx is not simply a MEM.
2773 Assignment of an array element at a constant index, and assignment of
2774 an array element in an unaligned packed structure field, has the same
2777 if (TREE_CODE (to
) == COMPONENT_REF
2778 || TREE_CODE (to
) == BIT_FIELD_REF
2779 || (TREE_CODE (to
) == ARRAY_REF
2780 && ((TREE_CODE (TREE_OPERAND (to
, 1)) == INTEGER_CST
2781 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to
))) == INTEGER_CST
)
2782 || (SLOW_UNALIGNED_ACCESS
&& get_inner_unaligned_p (to
)))))
2784 enum machine_mode mode1
;
2794 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
2795 &unsignedp
, &volatilep
, &alignment
);
2797 /* If we are going to use store_bit_field and extract_bit_field,
2798 make sure to_rtx will be safe for multiple use. */
2800 if (mode1
== VOIDmode
&& want_value
)
2801 tem
= stabilize_reference (tem
);
2803 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
2806 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
2808 if (GET_CODE (to_rtx
) != MEM
)
2810 to_rtx
= change_address (to_rtx
, VOIDmode
,
2811 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
2812 force_reg (ptr_mode
, offset_rtx
)));
2816 if (GET_CODE (to_rtx
) == MEM
)
2818 /* When the offset is zero, to_rtx is the address of the
2819 structure we are storing into, and hence may be shared.
2820 We must make a new MEM before setting the volatile bit. */
2822 to_rtx
= change_address (to_rtx
, VOIDmode
, XEXP (to_rtx
, 0));
2823 MEM_VOLATILE_P (to_rtx
) = 1;
2825 #if 0 /* This was turned off because, when a field is volatile
2826 in an object which is not volatile, the object may be in a register,
2827 and then we would abort over here. */
2833 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
2835 /* Spurious cast makes HPUX compiler happy. */
2836 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
2839 /* Required alignment of containing datum. */
2841 int_size_in_bytes (TREE_TYPE (tem
)));
2842 preserve_temp_slots (result
);
2846 /* If the value is meaningful, convert RESULT to the proper mode.
2847 Otherwise, return nothing. */
2848 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
2849 TYPE_MODE (TREE_TYPE (from
)),
2851 TREE_UNSIGNED (TREE_TYPE (to
)))
2855 /* If the rhs is a function call and its value is not an aggregate,
2856 call the function before we start to compute the lhs.
2857 This is needed for correct code for cases such as
2858 val = setjmp (buf) on machines where reference to val
2859 requires loading up part of an address in a separate insn.
2861 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2862 a promoted variable where the zero- or sign- extension needs to be done.
2863 Handling this in the normal way is safe because no computation is done
2865 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
2866 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
2867 && ! (TREE_CODE (to
) == VAR_DECL
&& GET_CODE (DECL_RTL (to
)) == REG
))
2872 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2874 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2876 /* Handle calls that return values in multiple non-contiguous locations.
2877 The Irix 6 ABI has examples of this. */
2878 if (GET_CODE (to_rtx
) == PARALLEL
)
2879 emit_group_load (to_rtx
, value
);
2880 else if (GET_MODE (to_rtx
) == BLKmode
)
2881 emit_block_move (to_rtx
, value
, expr_size (from
),
2882 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
2884 emit_move_insn (to_rtx
, value
);
2885 preserve_temp_slots (to_rtx
);
2888 return want_value
? to_rtx
: NULL_RTX
;
2891 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2892 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2895 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2897 /* Don't move directly into a return register. */
2898 if (TREE_CODE (to
) == RESULT_DECL
&& GET_CODE (to_rtx
) == REG
)
2903 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
2904 emit_move_insn (to_rtx
, temp
);
2905 preserve_temp_slots (to_rtx
);
2908 return want_value
? to_rtx
: NULL_RTX
;
2911 /* In case we are returning the contents of an object which overlaps
2912 the place the value is being stored, use a safe function when copying
2913 a value through a pointer into a structure value return block. */
2914 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
2915 && current_function_returns_struct
2916 && !current_function_returns_pcc_struct
)
2921 size
= expr_size (from
);
2922 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2924 #ifdef TARGET_MEM_FUNCTIONS
2925 emit_library_call (memcpy_libfunc
, 0,
2926 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
2927 XEXP (from_rtx
, 0), Pmode
,
2928 convert_to_mode (TYPE_MODE (sizetype
),
2929 size
, TREE_UNSIGNED (sizetype
)),
2930 TYPE_MODE (sizetype
));
2932 emit_library_call (bcopy_libfunc
, 0,
2933 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
2934 XEXP (to_rtx
, 0), Pmode
,
2935 convert_to_mode (TYPE_MODE (integer_type_node
),
2936 size
, TREE_UNSIGNED (integer_type_node
)),
2937 TYPE_MODE (integer_type_node
));
2940 preserve_temp_slots (to_rtx
);
2943 return want_value
? to_rtx
: NULL_RTX
;
2946 /* Compute FROM and store the value in the rtx we got. */
2949 result
= store_expr (from
, to_rtx
, want_value
);
2950 preserve_temp_slots (result
);
2953 return want_value
? result
: NULL_RTX
;
2956 /* Generate code for computing expression EXP,
2957 and storing the value into TARGET.
2958 TARGET may contain a QUEUED rtx.
2960 If WANT_VALUE is nonzero, return a copy of the value
2961 not in TARGET, so that we can be sure to use the proper
2962 value in a containing expression even if TARGET has something
2963 else stored in it. If possible, we copy the value through a pseudo
2964 and return that pseudo. Or, if the value is constant, we try to
2965 return the constant. In some cases, we return a pseudo
2966 copied *from* TARGET.
2968 If the mode is BLKmode then we may return TARGET itself.
2969 It turns out that in BLKmode it doesn't cause a problem,
2970 because C has no operators that could combine two different
2971 assignments into the same BLKmode object with different values
2972 with no sequence point. Will other languages need this to
2975 If WANT_VALUE is 0, we return NULL, to make sure
2976 to catch quickly any cases where the caller uses the value
2977 and fails to set WANT_VALUE. */
2980 store_expr (exp
, target
, want_value
)
2982 register rtx target
;
2986 int dont_return_target
= 0;
2988 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
2990 /* Perform first part of compound expression, then assign from second
2992 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
2994 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
2996 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
2998 /* For conditional expression, get safe form of the target. Then
2999 test the condition, doing the appropriate assignment on either
3000 side. This avoids the creation of unnecessary temporaries.
3001 For non-BLKmode, it is more efficient not to do this. */
3003 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3004 rtx flag
= NULL_RTX
;
3005 tree left_cleanups
= NULL_TREE
;
3006 tree right_cleanups
= NULL_TREE
;
3007 tree old_cleanups
= cleanups_this_call
;
3009 /* Used to save a pointer to the place to put the setting of
3010 the flag that indicates if this side of the conditional was
3011 taken. We backpatch the code, if we find out later that we
3012 have any conditional cleanups that need to be performed. */
3013 rtx dest_right_flag
= NULL_RTX
;
3014 rtx dest_left_flag
= NULL_RTX
;
3017 target
= protect_from_queue (target
, 1);
3019 do_pending_stack_adjust ();
3021 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3022 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3023 dest_left_flag
= get_last_insn ();
3024 /* Handle conditional cleanups, if any. */
3025 left_cleanups
= defer_cleanups_to (old_cleanups
);
3027 emit_jump_insn (gen_jump (lab2
));
3030 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3031 dest_right_flag
= get_last_insn ();
3032 /* Handle conditional cleanups, if any. */
3033 right_cleanups
= defer_cleanups_to (old_cleanups
);
3038 /* Add back in any conditional cleanups. */
3039 if (left_cleanups
|| right_cleanups
)
3045 /* Now that we know that a flag is needed, go back and add in the
3046 setting of the flag. */
3048 flag
= gen_reg_rtx (word_mode
);
3050 /* Do the left side flag. */
3051 last
= get_last_insn ();
3052 /* Flag left cleanups as needed. */
3053 emit_move_insn (flag
, const1_rtx
);
3054 /* ??? deprecated, use sequences instead. */
3055 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_left_flag
);
3057 /* Do the right side flag. */
3058 last
= get_last_insn ();
3059 /* Flag left cleanups as needed. */
3060 emit_move_insn (flag
, const0_rtx
);
3061 /* ??? deprecated, use sequences instead. */
3062 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_right_flag
);
3064 /* All cleanups must be on the function_obstack. */
3065 push_obstacks_nochange ();
3066 resume_temporary_allocation ();
3068 /* convert flag, which is an rtx, into a tree. */
3069 cond
= make_node (RTL_EXPR
);
3070 TREE_TYPE (cond
) = integer_type_node
;
3071 RTL_EXPR_RTL (cond
) = flag
;
3072 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
3073 cond
= save_expr (cond
);
3075 if (! left_cleanups
)
3076 left_cleanups
= integer_zero_node
;
3077 if (! right_cleanups
)
3078 right_cleanups
= integer_zero_node
;
3079 new_cleanups
= build (COND_EXPR
, void_type_node
,
3080 truthvalue_conversion (cond
),
3081 left_cleanups
, right_cleanups
);
3082 new_cleanups
= fold (new_cleanups
);
3086 /* Now add in the conditionalized cleanups. */
3088 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
3089 expand_eh_region_start ();
3091 return want_value
? target
: NULL_RTX
;
3093 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3094 && GET_MODE (target
) != BLKmode
)
3095 /* If target is in memory and caller wants value in a register instead,
3096 arrange that. Pass TARGET as target for expand_expr so that,
3097 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3098 We know expand_expr will not use the target in that case.
3099 Don't do this if TARGET is volatile because we are supposed
3100 to write it and then read it. */
3102 temp
= expand_expr (exp
, cse_not_expected
? NULL_RTX
: target
,
3103 GET_MODE (target
), 0);
3104 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3105 temp
= copy_to_reg (temp
);
3106 dont_return_target
= 1;
3108 else if (queued_subexp_p (target
))
3109 /* If target contains a postincrement, let's not risk
3110 using it as the place to generate the rhs. */
3112 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3114 /* Expand EXP into a new pseudo. */
3115 temp
= gen_reg_rtx (GET_MODE (target
));
3116 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3119 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3121 /* If target is volatile, ANSI requires accessing the value
3122 *from* the target, if it is accessed. So make that happen.
3123 In no case return the target itself. */
3124 if (! MEM_VOLATILE_P (target
) && want_value
)
3125 dont_return_target
= 1;
3127 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3128 /* If this is an scalar in a register that is stored in a wider mode
3129 than the declared mode, compute the result into its declared mode
3130 and then convert to the wider mode. Our value is the computed
3133 /* If we don't want a value, we can do the conversion inside EXP,
3134 which will often result in some optimizations. Do the conversion
3135 in two steps: first change the signedness, if needed, then
3136 the extend. But don't do this if the type of EXP is a subtype
3137 of something else since then the conversion might involve
3138 more than just converting modes. */
3139 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3140 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3142 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3143 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3146 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3150 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3151 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3155 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3157 /* If TEMP is a volatile MEM and we want a result value, make
3158 the access now so it gets done only once. Likewise if
3159 it contains TARGET. */
3160 if (GET_CODE (temp
) == MEM
&& want_value
3161 && (MEM_VOLATILE_P (temp
)
3162 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3163 temp
= copy_to_reg (temp
);
3165 /* If TEMP is a VOIDmode constant, use convert_modes to make
3166 sure that we properly convert it. */
3167 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3168 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3169 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3170 SUBREG_PROMOTED_UNSIGNED_P (target
));
3172 convert_move (SUBREG_REG (target
), temp
,
3173 SUBREG_PROMOTED_UNSIGNED_P (target
));
3174 return want_value
? temp
: NULL_RTX
;
3178 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3179 /* Return TARGET if it's a specified hardware register.
3180 If TARGET is a volatile mem ref, either return TARGET
3181 or return a reg copied *from* TARGET; ANSI requires this.
3183 Otherwise, if TEMP is not TARGET, return TEMP
3184 if it is constant (for efficiency),
3185 or if we really want the correct value. */
3186 if (!(target
&& GET_CODE (target
) == REG
3187 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3188 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3190 && (CONSTANT_P (temp
) || want_value
))
3191 dont_return_target
= 1;
3194 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3195 the same as that of TARGET, adjust the constant. This is needed, for
3196 example, in case it is a CONST_DOUBLE and we want only a word-sized
3198 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3199 && TREE_CODE (exp
) != ERROR_MARK
3200 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3201 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3202 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3204 /* If value was not generated in the target, store it there.
3205 Convert the value to TARGET's type first if nec. */
3207 if (temp
!= target
&& TREE_CODE (exp
) != ERROR_MARK
)
3209 target
= protect_from_queue (target
, 1);
3210 if (GET_MODE (temp
) != GET_MODE (target
)
3211 && GET_MODE (temp
) != VOIDmode
)
3213 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3214 if (dont_return_target
)
3216 /* In this case, we will return TEMP,
3217 so make sure it has the proper mode.
3218 But don't forget to store the value into TARGET. */
3219 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3220 emit_move_insn (target
, temp
);
3223 convert_move (target
, temp
, unsignedp
);
3226 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3228 /* Handle copying a string constant into an array.
3229 The string constant may be shorter than the array.
3230 So copy just the string's actual length, and clear the rest. */
3234 /* Get the size of the data type of the string,
3235 which is actually the size of the target. */
3236 size
= expr_size (exp
);
3237 if (GET_CODE (size
) == CONST_INT
3238 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3239 emit_block_move (target
, temp
, size
,
3240 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3243 /* Compute the size of the data to copy from the string. */
3245 = size_binop (MIN_EXPR
,
3246 make_tree (sizetype
, size
),
3248 build_int_2 (TREE_STRING_LENGTH (exp
), 0)));
3249 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3253 /* Copy that much. */
3254 emit_block_move (target
, temp
, copy_size_rtx
,
3255 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3257 /* Figure out how much is left in TARGET that we have to clear.
3258 Do all calculations in ptr_mode. */
3260 addr
= XEXP (target
, 0);
3261 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
3263 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3265 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
3266 size
= plus_constant (size
, - TREE_STRING_LENGTH (exp
));
3270 addr
= force_reg (ptr_mode
, addr
);
3271 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
3272 copy_size_rtx
, NULL_RTX
, 0,
3275 size
= expand_binop (ptr_mode
, sub_optab
, size
,
3276 copy_size_rtx
, NULL_RTX
, 0,
3279 emit_cmp_insn (size
, const0_rtx
, LT
, NULL_RTX
,
3280 GET_MODE (size
), 0, 0);
3281 label
= gen_label_rtx ();
3282 emit_jump_insn (gen_blt (label
));
3285 if (size
!= const0_rtx
)
3287 #ifdef TARGET_MEM_FUNCTIONS
3288 emit_library_call (memset_libfunc
, 0, VOIDmode
, 3,
3290 const0_rtx
, TYPE_MODE (integer_type_node
),
3291 convert_to_mode (TYPE_MODE (sizetype
),
3293 TREE_UNSIGNED (sizetype
)),
3294 TYPE_MODE (sizetype
));
3296 emit_library_call (bzero_libfunc
, 0, VOIDmode
, 2,
3298 convert_to_mode (TYPE_MODE (integer_type_node
),
3300 TREE_UNSIGNED (integer_type_node
)),
3301 TYPE_MODE (integer_type_node
));
3309 /* Handle calls that return values in multiple non-contiguous locations.
3310 The Irix 6 ABI has examples of this. */
3311 else if (GET_CODE (target
) == PARALLEL
)
3312 emit_group_load (target
, temp
);
3313 else if (GET_MODE (temp
) == BLKmode
)
3314 emit_block_move (target
, temp
, expr_size (exp
),
3315 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3317 emit_move_insn (target
, temp
);
3320 /* If we don't want a value, return NULL_RTX. */
3324 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3325 ??? The latter test doesn't seem to make sense. */
3326 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
3329 /* Return TARGET itself if it is a hard register. */
3330 else if (want_value
&& GET_MODE (target
) != BLKmode
3331 && ! (GET_CODE (target
) == REG
3332 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3333 return copy_to_reg (target
);
3339 /* Return 1 if EXP just contains zeros. */
3347 switch (TREE_CODE (exp
))
3351 case NON_LVALUE_EXPR
:
3352 return is_zeros_p (TREE_OPERAND (exp
, 0));
3355 return TREE_INT_CST_LOW (exp
) == 0 && TREE_INT_CST_HIGH (exp
) == 0;
3359 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
3362 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp
), dconst0
);
3365 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3366 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
3367 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3368 if (! is_zeros_p (TREE_VALUE (elt
)))
3377 /* Return 1 if EXP contains mostly (3/4) zeros. */
3380 mostly_zeros_p (exp
)
3383 if (TREE_CODE (exp
) == CONSTRUCTOR
)
3385 int elts
= 0, zeros
= 0;
3386 tree elt
= CONSTRUCTOR_ELTS (exp
);
3387 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3389 /* If there are no ranges of true bits, it is all zero. */
3390 return elt
== NULL_TREE
;
3392 for (; elt
; elt
= TREE_CHAIN (elt
))
3394 /* We do not handle the case where the index is a RANGE_EXPR,
3395 so the statistic will be somewhat inaccurate.
3396 We do make a more accurate count in store_constructor itself,
3397 so since this function is only used for nested array elements,
3398 this should be close enough. */
3399 if (mostly_zeros_p (TREE_VALUE (elt
)))
3404 return 4 * zeros
>= 3 * elts
;
3407 return is_zeros_p (exp
);
3410 /* Helper function for store_constructor.
3411 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3412 TYPE is the type of the CONSTRUCTOR, not the element type.
3413 CLEARED is as for store_constructor.
3415 This provides a recursive shortcut back to store_constructor when it isn't
3416 necessary to go through store_field. This is so that we can pass through
3417 the cleared field to let store_constructor know that we may not have to
3418 clear a substructure if the outer structure has already been cleared. */
3421 store_constructor_field (target
, bitsize
, bitpos
,
3422 mode
, exp
, type
, cleared
)
3424 int bitsize
, bitpos
;
3425 enum machine_mode mode
;
3429 if (TREE_CODE (exp
) == CONSTRUCTOR
3430 && bitpos
% BITS_PER_UNIT
== 0
3431 /* If we have a non-zero bitpos for a register target, then we just
3432 let store_field do the bitfield handling. This is unlikely to
3433 generate unnecessary clear instructions anyways. */
3434 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
3437 target
= change_address (target
, VOIDmode
,
3438 plus_constant (XEXP (target
, 0),
3439 bitpos
/ BITS_PER_UNIT
));
3440 store_constructor (exp
, target
, cleared
);
3443 store_field (target
, bitsize
, bitpos
, mode
, exp
,
3444 VOIDmode
, 0, TYPE_ALIGN (type
) / BITS_PER_UNIT
,
3445 int_size_in_bytes (type
));
3448 /* Store the value of constructor EXP into the rtx TARGET.
3449 TARGET is either a REG or a MEM.
3450 CLEARED is true if TARGET is known to have been zero'd. */
3453 store_constructor (exp
, target
, cleared
)
3458 tree type
= TREE_TYPE (exp
);
3460 /* We know our target cannot conflict, since safe_from_p has been called. */
3462 /* Don't try copying piece by piece into a hard register
3463 since that is vulnerable to being clobbered by EXP.
3464 Instead, construct in a pseudo register and then copy it all. */
3465 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3467 rtx temp
= gen_reg_rtx (GET_MODE (target
));
3468 store_constructor (exp
, temp
, 0);
3469 emit_move_insn (target
, temp
);
3474 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
3475 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3479 /* Inform later passes that the whole union value is dead. */
3480 if (TREE_CODE (type
) == UNION_TYPE
3481 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3482 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3484 /* If we are building a static constructor into a register,
3485 set the initial value as zero so we can fold the value into
3486 a constant. But if more than one register is involved,
3487 this probably loses. */
3488 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
3489 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
3492 emit_move_insn (target
, const0_rtx
);
3497 /* If the constructor has fewer fields than the structure
3498 or if we are initializing the structure to mostly zeros,
3499 clear the whole structure first. */
3500 else if ((list_length (CONSTRUCTOR_ELTS (exp
))
3501 != list_length (TYPE_FIELDS (type
)))
3502 || mostly_zeros_p (exp
))
3505 clear_storage (target
, expr_size (exp
),
3506 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3511 /* Inform later passes that the old value is dead. */
3512 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3514 /* Store each element of the constructor into
3515 the corresponding field of TARGET. */
3517 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3519 register tree field
= TREE_PURPOSE (elt
);
3520 register enum machine_mode mode
;
3524 tree pos
, constant
= 0, offset
= 0;
3525 rtx to_rtx
= target
;
3527 /* Just ignore missing fields.
3528 We cleared the whole structure, above,
3529 if any fields are missing. */
3533 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
3536 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
3537 unsignedp
= TREE_UNSIGNED (field
);
3538 mode
= DECL_MODE (field
);
3539 if (DECL_BIT_FIELD (field
))
3542 pos
= DECL_FIELD_BITPOS (field
);
3543 if (TREE_CODE (pos
) == INTEGER_CST
)
3545 else if (TREE_CODE (pos
) == PLUS_EXPR
3546 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
3547 constant
= TREE_OPERAND (pos
, 1), offset
= TREE_OPERAND (pos
, 0);
3552 bitpos
= TREE_INT_CST_LOW (constant
);
3558 if (contains_placeholder_p (offset
))
3559 offset
= build (WITH_RECORD_EXPR
, sizetype
,
3562 offset
= size_binop (FLOOR_DIV_EXPR
, offset
,
3563 size_int (BITS_PER_UNIT
));
3565 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3566 if (GET_CODE (to_rtx
) != MEM
)
3570 = change_address (to_rtx
, VOIDmode
,
3571 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
3572 force_reg (ptr_mode
, offset_rtx
)));
3574 if (TREE_READONLY (field
))
3576 if (GET_CODE (to_rtx
) == MEM
)
3577 to_rtx
= change_address (to_rtx
, GET_MODE (to_rtx
),
3579 RTX_UNCHANGING_P (to_rtx
) = 1;
3582 store_constructor_field (to_rtx
, bitsize
, bitpos
,
3583 mode
, TREE_VALUE (elt
), type
, cleared
);
3586 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3591 tree domain
= TYPE_DOMAIN (type
);
3592 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
3593 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
3594 tree elttype
= TREE_TYPE (type
);
3596 /* If the constructor has fewer elements than the array,
3597 clear the whole array first. Similarly if this this is
3598 static constructor of a non-BLKmode object. */
3599 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
3603 HOST_WIDE_INT count
= 0, zero_count
= 0;
3605 /* This loop is a more accurate version of the loop in
3606 mostly_zeros_p (it handles RANGE_EXPR in an index).
3607 It is also needed to check for missing elements. */
3608 for (elt
= CONSTRUCTOR_ELTS (exp
);
3610 elt
= TREE_CHAIN (elt
))
3612 tree index
= TREE_PURPOSE (elt
);
3613 HOST_WIDE_INT this_node_count
;
3614 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3616 tree lo_index
= TREE_OPERAND (index
, 0);
3617 tree hi_index
= TREE_OPERAND (index
, 1);
3618 if (TREE_CODE (lo_index
) != INTEGER_CST
3619 || TREE_CODE (hi_index
) != INTEGER_CST
)
3624 this_node_count
= TREE_INT_CST_LOW (hi_index
)
3625 - TREE_INT_CST_LOW (lo_index
) + 1;
3628 this_node_count
= 1;
3629 count
+= this_node_count
;
3630 if (mostly_zeros_p (TREE_VALUE (elt
)))
3631 zero_count
+= this_node_count
;
3633 /* Clear the entire array first if there are any missing elements,
3634 or if the incidence of zero elements is >= 75%. */
3635 if (count
< maxelt
- minelt
+ 1
3636 || 4 * zero_count
>= 3 * count
)
3642 clear_storage (target
, expr_size (exp
),
3643 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3647 /* Inform later passes that the old value is dead. */
3648 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3650 /* Store each element of the constructor into
3651 the corresponding element of TARGET, determined
3652 by counting the elements. */
3653 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
3655 elt
= TREE_CHAIN (elt
), i
++)
3657 register enum machine_mode mode
;
3661 tree value
= TREE_VALUE (elt
);
3662 tree index
= TREE_PURPOSE (elt
);
3663 rtx xtarget
= target
;
3665 if (cleared
&& is_zeros_p (value
))
3668 mode
= TYPE_MODE (elttype
);
3669 bitsize
= GET_MODE_BITSIZE (mode
);
3670 unsignedp
= TREE_UNSIGNED (elttype
);
3672 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3674 tree lo_index
= TREE_OPERAND (index
, 0);
3675 tree hi_index
= TREE_OPERAND (index
, 1);
3676 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
3677 struct nesting
*loop
;
3678 HOST_WIDE_INT lo
, hi
, count
;
3681 /* If the range is constant and "small", unroll the loop. */
3682 if (TREE_CODE (lo_index
) == INTEGER_CST
3683 && TREE_CODE (hi_index
) == INTEGER_CST
3684 && (lo
= TREE_INT_CST_LOW (lo_index
),
3685 hi
= TREE_INT_CST_LOW (hi_index
),
3686 count
= hi
- lo
+ 1,
3687 (GET_CODE (target
) != MEM
3689 || (TREE_CODE (TYPE_SIZE (elttype
)) == INTEGER_CST
3690 && TREE_INT_CST_LOW (TYPE_SIZE (elttype
)) * count
3693 lo
-= minelt
; hi
-= minelt
;
3694 for (; lo
<= hi
; lo
++)
3696 bitpos
= lo
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
));
3697 store_constructor_field (target
, bitsize
, bitpos
,
3698 mode
, value
, type
, cleared
);
3703 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
3704 loop_top
= gen_label_rtx ();
3705 loop_end
= gen_label_rtx ();
3707 unsignedp
= TREE_UNSIGNED (domain
);
3709 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
3711 DECL_RTL (index
) = index_r
3712 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
3715 if (TREE_CODE (value
) == SAVE_EXPR
3716 && SAVE_EXPR_RTL (value
) == 0)
3718 /* Make sure value gets expanded once before the
3720 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
3723 store_expr (lo_index
, index_r
, 0);
3724 loop
= expand_start_loop (0);
3726 /* Assign value to element index. */
3727 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3728 size_int (BITS_PER_UNIT
));
3729 position
= size_binop (MULT_EXPR
,
3730 size_binop (MINUS_EXPR
, index
,
3731 TYPE_MIN_VALUE (domain
)),
3733 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3734 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3735 xtarget
= change_address (target
, mode
, addr
);
3736 if (TREE_CODE (value
) == CONSTRUCTOR
)
3737 store_constructor (value
, xtarget
, cleared
);
3739 store_expr (value
, xtarget
, 0);
3741 expand_exit_loop_if_false (loop
,
3742 build (LT_EXPR
, integer_type_node
,
3745 expand_increment (build (PREINCREMENT_EXPR
,
3747 index
, integer_one_node
), 0, 0);
3749 emit_label (loop_end
);
3751 /* Needed by stupid register allocation. to extend the
3752 lifetime of pseudo-regs used by target past the end
3754 emit_insn (gen_rtx (USE
, GET_MODE (target
), target
));
3757 else if ((index
!= 0 && TREE_CODE (index
) != INTEGER_CST
)
3758 || TREE_CODE (TYPE_SIZE (elttype
)) != INTEGER_CST
)
3764 index
= size_int (i
);
3767 index
= size_binop (MINUS_EXPR
, index
,
3768 TYPE_MIN_VALUE (domain
));
3769 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3770 size_int (BITS_PER_UNIT
));
3771 position
= size_binop (MULT_EXPR
, index
, position
);
3772 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3773 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3774 xtarget
= change_address (target
, mode
, addr
);
3775 store_expr (value
, xtarget
, 0);
3780 bitpos
= ((TREE_INT_CST_LOW (index
) - minelt
)
3781 * TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3783 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3784 store_constructor_field (target
, bitsize
, bitpos
,
3785 mode
, value
, type
, cleared
);
3789 /* set constructor assignments */
3790 else if (TREE_CODE (type
) == SET_TYPE
)
3792 tree elt
= CONSTRUCTOR_ELTS (exp
);
3793 rtx xtarget
= XEXP (target
, 0);
3794 int set_word_size
= TYPE_ALIGN (type
);
3795 int nbytes
= int_size_in_bytes (type
), nbits
;
3796 tree domain
= TYPE_DOMAIN (type
);
3797 tree domain_min
, domain_max
, bitlength
;
3799 /* The default implementation strategy is to extract the constant
3800 parts of the constructor, use that to initialize the target,
3801 and then "or" in whatever non-constant ranges we need in addition.
3803 If a large set is all zero or all ones, it is
3804 probably better to set it using memset (if available) or bzero.
3805 Also, if a large set has just a single range, it may also be
3806 better to first clear all the first clear the set (using
3807 bzero/memset), and set the bits we want. */
3809 /* Check for all zeros. */
3810 if (elt
== NULL_TREE
)
3813 clear_storage (target
, expr_size (exp
),
3814 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3818 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
3819 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
3820 bitlength
= size_binop (PLUS_EXPR
,
3821 size_binop (MINUS_EXPR
, domain_max
, domain_min
),
3824 if (nbytes
< 0 || TREE_CODE (bitlength
) != INTEGER_CST
)
3826 nbits
= TREE_INT_CST_LOW (bitlength
);
3828 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3829 are "complicated" (more than one range), initialize (the
3830 constant parts) by copying from a constant. */
3831 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
3832 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
3834 int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
3835 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
3836 char *bit_buffer
= (char *) alloca (nbits
);
3837 HOST_WIDE_INT word
= 0;
3840 int offset
= 0; /* In bytes from beginning of set. */
3841 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
3844 if (bit_buffer
[ibit
])
3846 if (BYTES_BIG_ENDIAN
)
3847 word
|= (1 << (set_word_size
- 1 - bit_pos
));
3849 word
|= 1 << bit_pos
;
3852 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
3854 if (word
!= 0 || ! cleared
)
3856 rtx datum
= GEN_INT (word
);
3858 /* The assumption here is that it is safe to use
3859 XEXP if the set is multi-word, but not if
3860 it's single-word. */
3861 if (GET_CODE (target
) == MEM
)
3863 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
3864 to_rtx
= change_address (target
, mode
, to_rtx
);
3866 else if (offset
== 0)
3870 emit_move_insn (to_rtx
, datum
);
3876 offset
+= set_word_size
/ BITS_PER_UNIT
;
3882 /* Don't bother clearing storage if the set is all ones. */
3883 if (TREE_CHAIN (elt
) != NULL_TREE
3884 || (TREE_PURPOSE (elt
) == NULL_TREE
3886 : (TREE_CODE (TREE_VALUE (elt
)) != INTEGER_CST
3887 || TREE_CODE (TREE_PURPOSE (elt
)) != INTEGER_CST
3888 || (TREE_INT_CST_LOW (TREE_VALUE (elt
))
3889 - TREE_INT_CST_LOW (TREE_PURPOSE (elt
)) + 1
3891 clear_storage (target
, expr_size (exp
),
3892 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3895 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
3897 /* start of range of element or NULL */
3898 tree startbit
= TREE_PURPOSE (elt
);
3899 /* end of range of element, or element value */
3900 tree endbit
= TREE_VALUE (elt
);
3901 HOST_WIDE_INT startb
, endb
;
3902 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
3904 bitlength_rtx
= expand_expr (bitlength
,
3905 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
3907 /* handle non-range tuple element like [ expr ] */
3908 if (startbit
== NULL_TREE
)
3910 startbit
= save_expr (endbit
);
3913 startbit
= convert (sizetype
, startbit
);
3914 endbit
= convert (sizetype
, endbit
);
3915 if (! integer_zerop (domain_min
))
3917 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
3918 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
3920 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
3921 EXPAND_CONST_ADDRESS
);
3922 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
3923 EXPAND_CONST_ADDRESS
);
3927 targetx
= assign_stack_temp (GET_MODE (target
),
3928 GET_MODE_SIZE (GET_MODE (target
)),
3930 emit_move_insn (targetx
, target
);
3932 else if (GET_CODE (target
) == MEM
)
3937 #ifdef TARGET_MEM_FUNCTIONS
3938 /* Optimization: If startbit and endbit are
3939 constants divisible by BITS_PER_UNIT,
3940 call memset instead. */
3941 if (TREE_CODE (startbit
) == INTEGER_CST
3942 && TREE_CODE (endbit
) == INTEGER_CST
3943 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
3944 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
3946 emit_library_call (memset_libfunc
, 0,
3948 plus_constant (XEXP (targetx
, 0),
3949 startb
/ BITS_PER_UNIT
),
3951 constm1_rtx
, TYPE_MODE (integer_type_node
),
3952 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
3953 TYPE_MODE (sizetype
));
3958 emit_library_call (gen_rtx (SYMBOL_REF
, Pmode
, "__setbits"),
3959 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
3960 bitlength_rtx
, TYPE_MODE (sizetype
),
3961 startbit_rtx
, TYPE_MODE (sizetype
),
3962 endbit_rtx
, TYPE_MODE (sizetype
));
3965 emit_move_insn (target
, targetx
);
3973 /* Store the value of EXP (an expression tree)
3974 into a subfield of TARGET which has mode MODE and occupies
3975 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3976 If MODE is VOIDmode, it means that we are storing into a bit-field.
3978 If VALUE_MODE is VOIDmode, return nothing in particular.
3979 UNSIGNEDP is not used in this case.
3981 Otherwise, return an rtx for the value stored. This rtx
3982 has mode VALUE_MODE if that is convenient to do.
3983 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3985 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3986 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3989 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
3990 unsignedp
, align
, total_size
)
3992 int bitsize
, bitpos
;
3993 enum machine_mode mode
;
3995 enum machine_mode value_mode
;
4000 HOST_WIDE_INT width_mask
= 0;
4002 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4003 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4005 /* If we are storing into an unaligned field of an aligned union that is
4006 in a register, we may have the mode of TARGET being an integer mode but
4007 MODE == BLKmode. In that case, get an aligned object whose size and
4008 alignment are the same as TARGET and store TARGET into it (we can avoid
4009 the store if the field being stored is the entire width of TARGET). Then
4010 call ourselves recursively to store the field into a BLKmode version of
4011 that object. Finally, load from the object into TARGET. This is not
4012 very efficient in general, but should only be slightly more expensive
4013 than the otherwise-required unaligned accesses. Perhaps this can be
4014 cleaned up later. */
4017 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4019 rtx object
= assign_stack_temp (GET_MODE (target
),
4020 GET_MODE_SIZE (GET_MODE (target
)), 0);
4021 rtx blk_object
= copy_rtx (object
);
4023 MEM_IN_STRUCT_P (object
) = 1;
4024 MEM_IN_STRUCT_P (blk_object
) = 1;
4025 PUT_MODE (blk_object
, BLKmode
);
4027 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4028 emit_move_insn (object
, target
);
4030 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4033 /* Even though we aren't returning target, we need to
4034 give it the updated value. */
4035 emit_move_insn (target
, object
);
4040 /* If the structure is in a register or if the component
4041 is a bit field, we cannot use addressing to access it.
4042 Use bit-field techniques or SUBREG to store in it. */
4044 if (mode
== VOIDmode
4045 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
4046 || GET_CODE (target
) == REG
4047 || GET_CODE (target
) == SUBREG
4048 /* If the field isn't aligned enough to store as an ordinary memref,
4049 store it as a bit field. */
4050 || (SLOW_UNALIGNED_ACCESS
4051 && align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
))
4052 || (SLOW_UNALIGNED_ACCESS
&& bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
4054 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4056 /* If BITSIZE is narrower than the size of the type of EXP
4057 we will be narrowing TEMP. Normally, what's wanted are the
4058 low-order bits. However, if EXP's type is a record and this is
4059 big-endian machine, we want the upper BITSIZE bits. */
4060 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4061 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4062 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4063 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4064 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4068 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4070 if (mode
!= VOIDmode
&& mode
!= BLKmode
4071 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4072 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4074 /* If the modes of TARGET and TEMP are both BLKmode, both
4075 must be in memory and BITPOS must be aligned on a byte
4076 boundary. If so, we simply do a block copy. */
4077 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4079 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4080 || bitpos
% BITS_PER_UNIT
!= 0)
4083 target
= change_address (target
, VOIDmode
,
4084 plus_constant (XEXP (target
, 0),
4085 bitpos
/ BITS_PER_UNIT
));
4087 emit_block_move (target
, temp
,
4088 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4092 return value_mode
== VOIDmode
? const0_rtx
: target
;
4095 /* Store the value in the bitfield. */
4096 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4097 if (value_mode
!= VOIDmode
)
4099 /* The caller wants an rtx for the value. */
4100 /* If possible, avoid refetching from the bitfield itself. */
4102 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4105 enum machine_mode tmode
;
4108 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
4109 tmode
= GET_MODE (temp
);
4110 if (tmode
== VOIDmode
)
4112 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4113 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4114 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4116 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4117 NULL_RTX
, value_mode
, 0, align
,
4124 rtx addr
= XEXP (target
, 0);
4127 /* If a value is wanted, it must be the lhs;
4128 so make the address stable for multiple use. */
4130 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
4131 && ! CONSTANT_ADDRESS_P (addr
)
4132 /* A frame-pointer reference is already stable. */
4133 && ! (GET_CODE (addr
) == PLUS
4134 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
4135 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
4136 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
4137 addr
= copy_to_reg (addr
);
4139 /* Now build a reference to just the desired component. */
4141 to_rtx
= change_address (target
, mode
,
4142 plus_constant (addr
, (bitpos
/ BITS_PER_UNIT
)));
4143 MEM_IN_STRUCT_P (to_rtx
) = 1;
4145 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
4149 /* Return true if any object containing the innermost array is an unaligned
4150 packed structure field. */
4153 get_inner_unaligned_p (exp
)
4156 int needed_alignment
= TYPE_ALIGN (TREE_TYPE (exp
));
4160 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4162 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4166 else if (TREE_CODE (exp
) != ARRAY_REF
4167 && TREE_CODE (exp
) != NON_LVALUE_EXPR
4168 && ! ((TREE_CODE (exp
) == NOP_EXPR
4169 || TREE_CODE (exp
) == CONVERT_EXPR
)
4170 && (TYPE_MODE (TREE_TYPE (exp
))
4171 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4174 exp
= TREE_OPERAND (exp
, 0);
4180 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4181 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4182 ARRAY_REFs and find the ultimate containing object, which we return.
4184 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4185 bit position, and *PUNSIGNEDP to the signedness of the field.
4186 If the position of the field is variable, we store a tree
4187 giving the variable offset (in units) in *POFFSET.
4188 This offset is in addition to the bit position.
4189 If the position is not variable, we store 0 in *POFFSET.
4190 We set *PALIGNMENT to the alignment in bytes of the address that will be
4191 computed. This is the alignment of the thing we return if *POFFSET
4192 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4194 If any of the extraction expressions is volatile,
4195 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4197 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4198 is a mode that can be used to access the field. In that case, *PBITSIZE
4201 If the field describes a variable-sized object, *PMODE is set to
4202 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4203 this case, but the address of the object can be found. */
4206 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
4207 punsignedp
, pvolatilep
, palignment
)
4212 enum machine_mode
*pmode
;
4217 tree orig_exp
= exp
;
4219 enum machine_mode mode
= VOIDmode
;
4220 tree offset
= integer_zero_node
;
4221 int alignment
= BIGGEST_ALIGNMENT
;
4223 if (TREE_CODE (exp
) == COMPONENT_REF
)
4225 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
4226 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
4227 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
4228 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
4230 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
4232 size_tree
= TREE_OPERAND (exp
, 1);
4233 *punsignedp
= TREE_UNSIGNED (exp
);
4237 mode
= TYPE_MODE (TREE_TYPE (exp
));
4238 *pbitsize
= GET_MODE_BITSIZE (mode
);
4239 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4244 if (TREE_CODE (size_tree
) != INTEGER_CST
)
4245 mode
= BLKmode
, *pbitsize
= -1;
4247 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
4250 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4251 and find the ultimate containing object. */
4257 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4259 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
4260 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
4261 : TREE_OPERAND (exp
, 2));
4262 tree constant
= integer_zero_node
, var
= pos
;
4264 /* If this field hasn't been filled in yet, don't go
4265 past it. This should only happen when folding expressions
4266 made during type construction. */
4270 /* Assume here that the offset is a multiple of a unit.
4271 If not, there should be an explicitly added constant. */
4272 if (TREE_CODE (pos
) == PLUS_EXPR
4273 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4274 constant
= TREE_OPERAND (pos
, 1), var
= TREE_OPERAND (pos
, 0);
4275 else if (TREE_CODE (pos
) == INTEGER_CST
)
4276 constant
= pos
, var
= integer_zero_node
;
4278 *pbitpos
+= TREE_INT_CST_LOW (constant
);
4279 offset
= size_binop (PLUS_EXPR
, offset
,
4280 size_binop (EXACT_DIV_EXPR
, var
,
4281 size_int (BITS_PER_UNIT
)));
4284 else if (TREE_CODE (exp
) == ARRAY_REF
)
4286 /* This code is based on the code in case ARRAY_REF in expand_expr
4287 below. We assume here that the size of an array element is
4288 always an integral multiple of BITS_PER_UNIT. */
4290 tree index
= TREE_OPERAND (exp
, 1);
4291 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4293 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4294 tree index_type
= TREE_TYPE (index
);
4296 if (! integer_zerop (low_bound
))
4297 index
= fold (build (MINUS_EXPR
, index_type
, index
, low_bound
));
4299 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4301 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4303 index_type
= TREE_TYPE (index
);
4306 index
= fold (build (MULT_EXPR
, index_type
, index
,
4307 TYPE_SIZE (TREE_TYPE (exp
))));
4309 if (TREE_CODE (index
) == INTEGER_CST
4310 && TREE_INT_CST_HIGH (index
) == 0)
4311 *pbitpos
+= TREE_INT_CST_LOW (index
);
4313 offset
= size_binop (PLUS_EXPR
, offset
,
4314 size_binop (FLOOR_DIV_EXPR
, index
,
4315 size_int (BITS_PER_UNIT
)));
4317 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
4318 && ! ((TREE_CODE (exp
) == NOP_EXPR
4319 || TREE_CODE (exp
) == CONVERT_EXPR
)
4320 && ! (TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
4321 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4323 && (TYPE_MODE (TREE_TYPE (exp
))
4324 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4327 /* If any reference in the chain is volatile, the effect is volatile. */
4328 if (TREE_THIS_VOLATILE (exp
))
4331 /* If the offset is non-constant already, then we can't assume any
4332 alignment more than the alignment here. */
4333 if (! integer_zerop (offset
))
4334 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
4336 exp
= TREE_OPERAND (exp
, 0);
4339 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
4340 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
4341 else if (TREE_TYPE (exp
) != 0)
4342 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
4344 if (integer_zerop (offset
))
4347 if (offset
!= 0 && contains_placeholder_p (offset
))
4348 offset
= build (WITH_RECORD_EXPR
, sizetype
, offset
, orig_exp
);
4352 *palignment
= alignment
/ BITS_PER_UNIT
;
4356 /* Given an rtx VALUE that may contain additions and multiplications,
4357 return an equivalent value that just refers to a register or memory.
4358 This is done by generating instructions to perform the arithmetic
4359 and returning a pseudo-register containing the value.
4361 The returned value may be a REG, SUBREG, MEM or constant. */
4364 force_operand (value
, target
)
4367 register optab binoptab
= 0;
4368 /* Use a temporary to force order of execution of calls to
4372 /* Use subtarget as the target for operand 0 of a binary operation. */
4373 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
4375 if (GET_CODE (value
) == PLUS
)
4376 binoptab
= add_optab
;
4377 else if (GET_CODE (value
) == MINUS
)
4378 binoptab
= sub_optab
;
4379 else if (GET_CODE (value
) == MULT
)
4381 op2
= XEXP (value
, 1);
4382 if (!CONSTANT_P (op2
)
4383 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
4385 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4386 return expand_mult (GET_MODE (value
), tmp
,
4387 force_operand (op2
, NULL_RTX
),
4393 op2
= XEXP (value
, 1);
4394 if (!CONSTANT_P (op2
)
4395 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
4397 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
4399 binoptab
= add_optab
;
4400 op2
= negate_rtx (GET_MODE (value
), op2
);
4403 /* Check for an addition with OP2 a constant integer and our first
4404 operand a PLUS of a virtual register and something else. In that
4405 case, we want to emit the sum of the virtual register and the
4406 constant first and then add the other value. This allows virtual
4407 register instantiation to simply modify the constant rather than
4408 creating another one around this addition. */
4409 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
4410 && GET_CODE (XEXP (value
, 0)) == PLUS
4411 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
4412 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4413 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
4415 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
4416 XEXP (XEXP (value
, 0), 0), op2
,
4417 subtarget
, 0, OPTAB_LIB_WIDEN
);
4418 return expand_binop (GET_MODE (value
), binoptab
, temp
,
4419 force_operand (XEXP (XEXP (value
, 0), 1), 0),
4420 target
, 0, OPTAB_LIB_WIDEN
);
4423 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4424 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
4425 force_operand (op2
, NULL_RTX
),
4426 target
, 0, OPTAB_LIB_WIDEN
);
4427 /* We give UNSIGNEDP = 0 to expand_binop
4428 because the only operations we are expanding here are signed ones. */
4433 /* Subroutine of expand_expr:
4434 save the non-copied parts (LIST) of an expr (LHS), and return a list
4435 which can restore these values to their previous values,
4436 should something modify their storage. */
4439 save_noncopied_parts (lhs
, list
)
4446 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4447 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4448 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4451 tree part
= TREE_VALUE (tail
);
4452 tree part_type
= TREE_TYPE (part
);
4453 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
4454 rtx target
= assign_temp (part_type
, 0, 1, 1);
4455 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
4456 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
4457 parts
= tree_cons (to_be_saved
,
4458 build (RTL_EXPR
, part_type
, NULL_TREE
,
4461 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
4466 /* Subroutine of expand_expr:
4467 record the non-copied parts (LIST) of an expr (LHS), and return a list
4468 which specifies the initial values of these parts. */
4471 init_noncopied_parts (lhs
, list
)
4478 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4479 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4480 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4483 tree part
= TREE_VALUE (tail
);
4484 tree part_type
= TREE_TYPE (part
);
4485 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
4486 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
4491 /* Subroutine of expand_expr: return nonzero iff there is no way that
4492 EXP can reference X, which is being modified. */
4495 safe_from_p (x
, exp
)
4503 /* If EXP has varying size, we MUST use a target since we currently
4504 have no way of allocating temporaries of variable size
4505 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4506 So we assume here that something at a higher level has prevented a
4507 clash. This is somewhat bogus, but the best we can do. Only
4508 do this when X is BLKmode. */
4509 || (TREE_TYPE (exp
) != 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4510 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
4511 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
4512 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
4513 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
4515 && GET_MODE (x
) == BLKmode
))
4518 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4519 find the underlying pseudo. */
4520 if (GET_CODE (x
) == SUBREG
)
4523 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4527 /* If X is a location in the outgoing argument area, it is always safe. */
4528 if (GET_CODE (x
) == MEM
4529 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
4530 || (GET_CODE (XEXP (x
, 0)) == PLUS
4531 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
4534 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4537 exp_rtl
= DECL_RTL (exp
);
4544 if (TREE_CODE (exp
) == TREE_LIST
)
4545 return ((TREE_VALUE (exp
) == 0
4546 || safe_from_p (x
, TREE_VALUE (exp
)))
4547 && (TREE_CHAIN (exp
) == 0
4548 || safe_from_p (x
, TREE_CHAIN (exp
))));
4553 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4557 return (safe_from_p (x
, TREE_OPERAND (exp
, 0))
4558 && safe_from_p (x
, TREE_OPERAND (exp
, 1)));
4562 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4563 the expression. If it is set, we conflict iff we are that rtx or
4564 both are in memory. Otherwise, we check all operands of the
4565 expression recursively. */
4567 switch (TREE_CODE (exp
))
4570 return (staticp (TREE_OPERAND (exp
, 0))
4571 || safe_from_p (x
, TREE_OPERAND (exp
, 0)));
4574 if (GET_CODE (x
) == MEM
)
4579 exp_rtl
= CALL_EXPR_RTL (exp
);
4582 /* Assume that the call will clobber all hard registers and
4584 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4585 || GET_CODE (x
) == MEM
)
4592 /* If a sequence exists, we would have to scan every instruction
4593 in the sequence to see if it was safe. This is probably not
4595 if (RTL_EXPR_SEQUENCE (exp
))
4598 exp_rtl
= RTL_EXPR_RTL (exp
);
4601 case WITH_CLEANUP_EXPR
:
4602 exp_rtl
= RTL_EXPR_RTL (exp
);
4605 case CLEANUP_POINT_EXPR
:
4606 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4609 exp_rtl
= SAVE_EXPR_RTL (exp
);
4613 /* The only operand we look at is operand 1. The rest aren't
4614 part of the expression. */
4615 return safe_from_p (x
, TREE_OPERAND (exp
, 1));
4617 case METHOD_CALL_EXPR
:
4618 /* This takes a rtx argument, but shouldn't appear here. */
4622 /* If we have an rtx, we do not need to scan our operands. */
4626 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
4627 for (i
= 0; i
< nops
; i
++)
4628 if (TREE_OPERAND (exp
, i
) != 0
4629 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
)))
4633 /* If we have an rtl, find any enclosed object. Then see if we conflict
4637 if (GET_CODE (exp_rtl
) == SUBREG
)
4639 exp_rtl
= SUBREG_REG (exp_rtl
);
4640 if (GET_CODE (exp_rtl
) == REG
4641 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
4645 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4646 are memory and EXP is not readonly. */
4647 return ! (rtx_equal_p (x
, exp_rtl
)
4648 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
4649 && ! TREE_READONLY (exp
)));
4652 /* If we reach here, it is safe. */
4656 /* Subroutine of expand_expr: return nonzero iff EXP is an
4657 expression whose type is statically determinable. */
4663 if (TREE_CODE (exp
) == PARM_DECL
4664 || TREE_CODE (exp
) == VAR_DECL
4665 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
4666 || TREE_CODE (exp
) == COMPONENT_REF
4667 || TREE_CODE (exp
) == ARRAY_REF
)
4672 /* Subroutine of expand_expr: return rtx if EXP is a
4673 variable or parameter; else return 0. */
4680 switch (TREE_CODE (exp
))
4684 return DECL_RTL (exp
);
4690 /* expand_expr: generate code for computing expression EXP.
4691 An rtx for the computed value is returned. The value is never null.
4692 In the case of a void EXP, const0_rtx is returned.
4694 The value may be stored in TARGET if TARGET is nonzero.
4695 TARGET is just a suggestion; callers must assume that
4696 the rtx returned may not be the same as TARGET.
4698 If TARGET is CONST0_RTX, it means that the value will be ignored.
4700 If TMODE is not VOIDmode, it suggests generating the
4701 result in mode TMODE. But this is done only when convenient.
4702 Otherwise, TMODE is ignored and the value generated in its natural mode.
4703 TMODE is just a suggestion; callers must assume that
4704 the rtx returned may not have mode TMODE.
4706 Note that TARGET may have neither TMODE nor MODE. In that case, it
4707 probably will not be used.
4709 If MODIFIER is EXPAND_SUM then when EXP is an addition
4710 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4711 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4712 products as above, or REG or MEM, or constant.
4713 Ordinarily in such cases we would output mul or add instructions
4714 and then return a pseudo reg containing the sum.
4716 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4717 it also marks a label as absolutely required (it can't be dead).
4718 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4719 This is used for outputting expressions used in initializers.
4721 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4722 with a constant address even if that address is not normally legitimate.
4723 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4726 expand_expr (exp
, target
, tmode
, modifier
)
4729 enum machine_mode tmode
;
4730 enum expand_modifier modifier
;
4732 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4733 This is static so it will be accessible to our recursive callees. */
4734 static tree placeholder_list
= 0;
4735 register rtx op0
, op1
, temp
;
4736 tree type
= TREE_TYPE (exp
);
4737 int unsignedp
= TREE_UNSIGNED (type
);
4738 register enum machine_mode mode
= TYPE_MODE (type
);
4739 register enum tree_code code
= TREE_CODE (exp
);
4741 /* Use subtarget as the target for operand 0 of a binary operation. */
4742 rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
4743 rtx original_target
= target
;
4744 /* Maybe defer this until sure not doing bytecode? */
4745 int ignore
= (target
== const0_rtx
4746 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
4747 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
4748 || code
== COND_EXPR
)
4749 && TREE_CODE (type
) == VOID_TYPE
));
4753 if (output_bytecode
&& modifier
!= EXPAND_INITIALIZER
)
4755 bc_expand_expr (exp
);
4759 /* Don't use hard regs as subtargets, because the combiner
4760 can only handle pseudo regs. */
4761 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
4763 /* Avoid subtargets inside loops,
4764 since they hide some invariant expressions. */
4765 if (preserve_subexpressions_p ())
4768 /* If we are going to ignore this result, we need only do something
4769 if there is a side-effect somewhere in the expression. If there
4770 is, short-circuit the most common cases here. Note that we must
4771 not call expand_expr with anything but const0_rtx in case this
4772 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4776 if (! TREE_SIDE_EFFECTS (exp
))
4779 /* Ensure we reference a volatile object even if value is ignored. */
4780 if (TREE_THIS_VOLATILE (exp
)
4781 && TREE_CODE (exp
) != FUNCTION_DECL
4782 && mode
!= VOIDmode
&& mode
!= BLKmode
)
4784 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
4785 if (GET_CODE (temp
) == MEM
)
4786 temp
= copy_to_reg (temp
);
4790 if (TREE_CODE_CLASS (code
) == '1')
4791 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4792 VOIDmode
, modifier
);
4793 else if (TREE_CODE_CLASS (code
) == '2'
4794 || TREE_CODE_CLASS (code
) == '<')
4796 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
4797 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
4800 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
4801 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
4802 /* If the second operand has no side effects, just evaluate
4804 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4805 VOIDmode
, modifier
);
4810 /* If will do cse, generate all results into pseudo registers
4811 since 1) that allows cse to find more things
4812 and 2) otherwise cse could produce an insn the machine
4815 if (! cse_not_expected
&& mode
!= BLKmode
&& target
4816 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4823 tree function
= decl_function_context (exp
);
4824 /* Handle using a label in a containing function. */
4825 if (function
!= current_function_decl
&& function
!= 0)
4827 struct function
*p
= find_function_data (function
);
4828 /* Allocate in the memory associated with the function
4829 that the label is in. */
4830 push_obstacks (p
->function_obstack
,
4831 p
->function_maybepermanent_obstack
);
4833 p
->forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4834 label_rtx (exp
), p
->forced_labels
);
4837 else if (modifier
== EXPAND_INITIALIZER
)
4838 forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4839 label_rtx (exp
), forced_labels
);
4840 temp
= gen_rtx (MEM
, FUNCTION_MODE
,
4841 gen_rtx (LABEL_REF
, Pmode
, label_rtx (exp
)));
4842 if (function
!= current_function_decl
&& function
!= 0)
4843 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
4848 if (DECL_RTL (exp
) == 0)
4850 error_with_decl (exp
, "prior parameter's size depends on `%s'");
4851 return CONST0_RTX (mode
);
4854 /* ... fall through ... */
4857 /* If a static var's type was incomplete when the decl was written,
4858 but the type is complete now, lay out the decl now. */
4859 if (DECL_SIZE (exp
) == 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4860 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
4862 push_obstacks_nochange ();
4863 end_temporary_allocation ();
4864 layout_decl (exp
, 0);
4865 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
4869 /* ... fall through ... */
4873 if (DECL_RTL (exp
) == 0)
4876 /* Ensure variable marked as used even if it doesn't go through
4877 a parser. If it hasn't been used yet, write out an external
4879 if (! TREE_USED (exp
))
4881 assemble_external (exp
);
4882 TREE_USED (exp
) = 1;
4885 /* Show we haven't gotten RTL for this yet. */
4888 /* Handle variables inherited from containing functions. */
4889 context
= decl_function_context (exp
);
4891 /* We treat inline_function_decl as an alias for the current function
4892 because that is the inline function whose vars, types, etc.
4893 are being merged into the current function.
4894 See expand_inline_function. */
4896 if (context
!= 0 && context
!= current_function_decl
4897 && context
!= inline_function_decl
4898 /* If var is static, we don't need a static chain to access it. */
4899 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
4900 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
4904 /* Mark as non-local and addressable. */
4905 DECL_NONLOCAL (exp
) = 1;
4906 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
4908 mark_addressable (exp
);
4909 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
4911 addr
= XEXP (DECL_RTL (exp
), 0);
4912 if (GET_CODE (addr
) == MEM
)
4913 addr
= gen_rtx (MEM
, Pmode
,
4914 fix_lexical_addr (XEXP (addr
, 0), exp
));
4916 addr
= fix_lexical_addr (addr
, exp
);
4917 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
4920 /* This is the case of an array whose size is to be determined
4921 from its initializer, while the initializer is still being parsed.
4924 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4925 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
4926 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
4927 XEXP (DECL_RTL (exp
), 0));
4929 /* If DECL_RTL is memory, we are in the normal case and either
4930 the address is not valid or it is not a register and -fforce-addr
4931 is specified, get the address into a register. */
4933 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4934 && modifier
!= EXPAND_CONST_ADDRESS
4935 && modifier
!= EXPAND_SUM
4936 && modifier
!= EXPAND_INITIALIZER
4937 && (! memory_address_p (DECL_MODE (exp
),
4938 XEXP (DECL_RTL (exp
), 0))
4940 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
4941 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
4942 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
4944 /* If we got something, return it. But first, set the alignment
4945 the address is a register. */
4948 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
4949 mark_reg_pointer (XEXP (temp
, 0),
4950 DECL_ALIGN (exp
) / BITS_PER_UNIT
);
4955 /* If the mode of DECL_RTL does not match that of the decl, it
4956 must be a promoted value. We return a SUBREG of the wanted mode,
4957 but mark it so that we know that it was already extended. */
4959 if (GET_CODE (DECL_RTL (exp
)) == REG
4960 && GET_MODE (DECL_RTL (exp
)) != mode
)
4962 /* Get the signedness used for this variable. Ensure we get the
4963 same mode we got when the variable was declared. */
4964 if (GET_MODE (DECL_RTL (exp
))
4965 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
4968 temp
= gen_rtx (SUBREG
, mode
, DECL_RTL (exp
), 0);
4969 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4970 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4974 return DECL_RTL (exp
);
4977 return immed_double_const (TREE_INT_CST_LOW (exp
),
4978 TREE_INT_CST_HIGH (exp
),
4982 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
4985 /* If optimized, generate immediate CONST_DOUBLE
4986 which will be turned into memory by reload if necessary.
4988 We used to force a register so that loop.c could see it. But
4989 this does not allow gen_* patterns to perform optimizations with
4990 the constants. It also produces two insns in cases like "x = 1.0;".
4991 On most machines, floating-point constants are not permitted in
4992 many insns, so we'd end up copying it to a register in any case.
4994 Now, we do the copying in expand_binop, if appropriate. */
4995 return immed_real_const (exp
);
4999 if (! TREE_CST_RTL (exp
))
5000 output_constant_def (exp
);
5002 /* TREE_CST_RTL probably contains a constant address.
5003 On RISC machines where a constant address isn't valid,
5004 make some insns to get that address into a register. */
5005 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
5006 && modifier
!= EXPAND_CONST_ADDRESS
5007 && modifier
!= EXPAND_INITIALIZER
5008 && modifier
!= EXPAND_SUM
5009 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
5011 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
5012 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
5013 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
5014 return TREE_CST_RTL (exp
);
5017 context
= decl_function_context (exp
);
5019 /* We treat inline_function_decl as an alias for the current function
5020 because that is the inline function whose vars, types, etc.
5021 are being merged into the current function.
5022 See expand_inline_function. */
5023 if (context
== current_function_decl
|| context
== inline_function_decl
)
5026 /* If this is non-local, handle it. */
5029 temp
= SAVE_EXPR_RTL (exp
);
5030 if (temp
&& GET_CODE (temp
) == REG
)
5032 put_var_into_stack (exp
);
5033 temp
= SAVE_EXPR_RTL (exp
);
5035 if (temp
== 0 || GET_CODE (temp
) != MEM
)
5037 return change_address (temp
, mode
,
5038 fix_lexical_addr (XEXP (temp
, 0), exp
));
5040 if (SAVE_EXPR_RTL (exp
) == 0)
5042 if (mode
== VOIDmode
)
5045 temp
= assign_temp (type
, 0, 0, 0);
5047 SAVE_EXPR_RTL (exp
) = temp
;
5048 if (!optimize
&& GET_CODE (temp
) == REG
)
5049 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, temp
,
5052 /* If the mode of TEMP does not match that of the expression, it
5053 must be a promoted value. We pass store_expr a SUBREG of the
5054 wanted mode but mark it so that we know that it was already
5055 extended. Note that `unsignedp' was modified above in
5058 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
5060 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5061 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5062 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5065 if (temp
== const0_rtx
)
5066 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
5068 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
5071 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5072 must be a promoted value. We return a SUBREG of the wanted mode,
5073 but mark it so that we know that it was already extended. */
5075 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
5076 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
5078 /* Compute the signedness and make the proper SUBREG. */
5079 promote_mode (type
, mode
, &unsignedp
, 0);
5080 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5081 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5082 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5086 return SAVE_EXPR_RTL (exp
);
5091 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5092 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
5096 case PLACEHOLDER_EXPR
:
5097 /* If there is an object on the head of the placeholder list,
5098 see if some object in its references is of type TYPE. For
5099 further information, see tree.def. */
5100 if (placeholder_list
)
5103 tree old_list
= placeholder_list
;
5105 for (object
= TREE_PURPOSE (placeholder_list
);
5106 (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
5107 != TYPE_MAIN_VARIANT (type
))
5108 && (TREE_CODE_CLASS (TREE_CODE (object
)) == 'r'
5109 || TREE_CODE_CLASS (TREE_CODE (object
)) == '1'
5110 || TREE_CODE_CLASS (TREE_CODE (object
)) == '2'
5111 || TREE_CODE_CLASS (TREE_CODE (object
)) == 'e');
5112 object
= TREE_OPERAND (object
, 0))
5116 && (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
5117 == TYPE_MAIN_VARIANT (type
)))
5119 /* Expand this object skipping the list entries before
5120 it was found in case it is also a PLACEHOLDER_EXPR.
5121 In that case, we want to translate it using subsequent
5123 placeholder_list
= TREE_CHAIN (placeholder_list
);
5124 temp
= expand_expr (object
, original_target
, tmode
, modifier
);
5125 placeholder_list
= old_list
;
5130 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5133 case WITH_RECORD_EXPR
:
5134 /* Put the object on the placeholder list, expand our first operand,
5135 and pop the list. */
5136 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
5138 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
5140 placeholder_list
= TREE_CHAIN (placeholder_list
);
5144 expand_exit_loop_if_false (NULL_PTR
,
5145 invert_truthvalue (TREE_OPERAND (exp
, 0)));
5150 expand_start_loop (1);
5151 expand_expr_stmt (TREE_OPERAND (exp
, 0));
5159 tree vars
= TREE_OPERAND (exp
, 0);
5160 int vars_need_expansion
= 0;
5162 /* Need to open a binding contour here because
5163 if there are any cleanups they must be contained here. */
5164 expand_start_bindings (0);
5166 /* Mark the corresponding BLOCK for output in its proper place. */
5167 if (TREE_OPERAND (exp
, 2) != 0
5168 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
5169 insert_block (TREE_OPERAND (exp
, 2));
5171 /* If VARS have not yet been expanded, expand them now. */
5174 if (DECL_RTL (vars
) == 0)
5176 vars_need_expansion
= 1;
5179 expand_decl_init (vars
);
5180 vars
= TREE_CHAIN (vars
);
5183 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
5185 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
5191 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
5193 emit_insns (RTL_EXPR_SEQUENCE (exp
));
5194 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
5195 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
5196 free_temps_for_rtl_expr (exp
);
5197 return RTL_EXPR_RTL (exp
);
5200 /* If we don't need the result, just ensure we evaluate any
5205 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
5206 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
5210 /* All elts simple constants => refer to a constant in memory. But
5211 if this is a non-BLKmode mode, let it store a field at a time
5212 since that should make a CONST_INT or CONST_DOUBLE when we
5213 fold. Likewise, if we have a target we can use, it is best to
5214 store directly into the target unless the type is large enough
5215 that memcpy will be used. If we are making an initializer and
5216 all operands are constant, put it in memory as well. */
5217 else if ((TREE_STATIC (exp
)
5218 && ((mode
== BLKmode
5219 && ! (target
!= 0 && safe_from_p (target
, exp
)))
5220 || TREE_ADDRESSABLE (exp
)
5221 || (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5222 && (move_by_pieces_ninsns
5223 (TREE_INT_CST_LOW (TYPE_SIZE (type
))/BITS_PER_UNIT
,
5224 TYPE_ALIGN (type
) / BITS_PER_UNIT
)
5226 && ! mostly_zeros_p (exp
))))
5227 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
5229 rtx constructor
= output_constant_def (exp
);
5230 if (modifier
!= EXPAND_CONST_ADDRESS
5231 && modifier
!= EXPAND_INITIALIZER
5232 && modifier
!= EXPAND_SUM
5233 && (! memory_address_p (GET_MODE (constructor
),
5234 XEXP (constructor
, 0))
5236 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
5237 constructor
= change_address (constructor
, VOIDmode
,
5238 XEXP (constructor
, 0));
5244 /* Handle calls that pass values in multiple non-contiguous
5245 locations. The Irix 6 ABI has examples of this. */
5246 if (target
== 0 || ! safe_from_p (target
, exp
)
5247 || GET_CODE (target
) == PARALLEL
)
5249 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
5250 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5252 target
= assign_temp (type
, 0, 1, 1);
5255 if (TREE_READONLY (exp
))
5257 if (GET_CODE (target
) == MEM
)
5258 target
= change_address (target
, GET_MODE (target
),
5260 RTX_UNCHANGING_P (target
) = 1;
5263 store_constructor (exp
, target
, 0);
5269 tree exp1
= TREE_OPERAND (exp
, 0);
5272 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
5273 op0
= memory_address (mode
, op0
);
5275 temp
= gen_rtx (MEM
, mode
, op0
);
5276 /* If address was computed by addition,
5277 mark this as an element of an aggregate. */
5278 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5279 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
5280 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == PLUS_EXPR
)
5281 || AGGREGATE_TYPE_P (TREE_TYPE (exp
))
5282 || (TREE_CODE (exp1
) == ADDR_EXPR
5283 && (exp2
= TREE_OPERAND (exp1
, 0))
5284 && AGGREGATE_TYPE_P (TREE_TYPE (exp2
))))
5285 MEM_IN_STRUCT_P (temp
) = 1;
5286 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) | flag_volatile
;
5288 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5289 here, because, in C and C++, the fact that a location is accessed
5290 through a pointer to const does not mean that the value there can
5291 never change. Languages where it can never change should
5292 also set TREE_STATIC. */
5293 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
5298 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
5302 tree array
= TREE_OPERAND (exp
, 0);
5303 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5304 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
5305 tree index
= TREE_OPERAND (exp
, 1);
5306 tree index_type
= TREE_TYPE (index
);
5309 if (TREE_CODE (low_bound
) != INTEGER_CST
5310 && contains_placeholder_p (low_bound
))
5311 low_bound
= build (WITH_RECORD_EXPR
, sizetype
, low_bound
, exp
);
5313 /* Optimize the special-case of a zero lower bound.
5315 We convert the low_bound to sizetype to avoid some problems
5316 with constant folding. (E.g. suppose the lower bound is 1,
5317 and its mode is QI. Without the conversion, (ARRAY
5318 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5319 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5321 But sizetype isn't quite right either (especially if
5322 the lowbound is negative). FIXME */
5324 if (! integer_zerop (low_bound
))
5325 index
= fold (build (MINUS_EXPR
, index_type
, index
,
5326 convert (sizetype
, low_bound
)));
5328 if ((TREE_CODE (index
) != INTEGER_CST
5329 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5330 && (! SLOW_UNALIGNED_ACCESS
|| ! get_inner_unaligned_p (exp
)))
5332 /* Nonconstant array index or nonconstant element size, and
5333 not an array in an unaligned (packed) structure field.
5334 Generate the tree for *(&array+index) and expand that,
5335 except do it in a language-independent way
5336 and don't complain about non-lvalue arrays.
5337 `mark_addressable' should already have been called
5338 for any array for which this case will be reached. */
5340 /* Don't forget the const or volatile flag from the array
5342 tree variant_type
= build_type_variant (type
,
5343 TREE_READONLY (exp
),
5344 TREE_THIS_VOLATILE (exp
));
5345 tree array_adr
= build1 (ADDR_EXPR
,
5346 build_pointer_type (variant_type
), array
);
5348 tree size
= size_in_bytes (type
);
5350 /* Convert the integer argument to a type the same size as sizetype
5351 so the multiply won't overflow spuriously. */
5352 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
5353 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
5356 if (TREE_CODE (size
) != INTEGER_CST
5357 && contains_placeholder_p (size
))
5358 size
= build (WITH_RECORD_EXPR
, sizetype
, size
, exp
);
5360 /* Don't think the address has side effects
5361 just because the array does.
5362 (In some cases the address might have side effects,
5363 and we fail to record that fact here. However, it should not
5364 matter, since expand_expr should not care.) */
5365 TREE_SIDE_EFFECTS (array_adr
) = 0;
5369 (INDIRECT_REF
, type
,
5370 fold (build (PLUS_EXPR
,
5371 TYPE_POINTER_TO (variant_type
),
5376 TYPE_POINTER_TO (variant_type
),
5377 fold (build (MULT_EXPR
, TREE_TYPE (index
),
5379 convert (TREE_TYPE (index
),
5382 /* Volatility, etc., of new expression is same as old
5384 TREE_SIDE_EFFECTS (elt
) = TREE_SIDE_EFFECTS (exp
);
5385 TREE_THIS_VOLATILE (elt
) = TREE_THIS_VOLATILE (exp
);
5386 TREE_READONLY (elt
) = TREE_READONLY (exp
);
5388 return expand_expr (elt
, target
, tmode
, modifier
);
5391 /* Fold an expression like: "foo"[2].
5392 This is not done in fold so it won't happen inside &.
5393 Don't fold if this is for wide characters since it's too
5394 difficult to do correctly and this is a very rare case. */
5396 if (TREE_CODE (array
) == STRING_CST
5397 && TREE_CODE (index
) == INTEGER_CST
5398 && !TREE_INT_CST_HIGH (index
)
5399 && (i
= TREE_INT_CST_LOW (index
)) < TREE_STRING_LENGTH (array
)
5400 && GET_MODE_CLASS (mode
) == MODE_INT
5401 && GET_MODE_SIZE (mode
) == 1)
5402 return GEN_INT (TREE_STRING_POINTER (array
)[i
]);
5404 /* If this is a constant index into a constant array,
5405 just get the value from the array. Handle both the cases when
5406 we have an explicit constructor and when our operand is a variable
5407 that was declared const. */
5409 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
5411 if (TREE_CODE (index
) == INTEGER_CST
5412 && TREE_INT_CST_HIGH (index
) == 0)
5414 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
5416 i
= TREE_INT_CST_LOW (index
);
5418 elem
= TREE_CHAIN (elem
);
5420 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5425 else if (optimize
>= 1
5426 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
5427 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
5428 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
5430 if (TREE_CODE (index
) == INTEGER_CST
5431 && TREE_INT_CST_HIGH (index
) == 0)
5433 tree init
= DECL_INITIAL (array
);
5435 i
= TREE_INT_CST_LOW (index
);
5436 if (TREE_CODE (init
) == CONSTRUCTOR
)
5438 tree elem
= CONSTRUCTOR_ELTS (init
);
5441 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
))
5442 elem
= TREE_CHAIN (elem
);
5444 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5447 else if (TREE_CODE (init
) == STRING_CST
5448 && i
< TREE_STRING_LENGTH (init
))
5449 return GEN_INT (TREE_STRING_POINTER (init
)[i
]);
5454 /* Treat array-ref with constant index as a component-ref. */
5458 /* If the operand is a CONSTRUCTOR, we can just extract the
5459 appropriate field if it is present. Don't do this if we have
5460 already written the data since we want to refer to that copy
5461 and varasm.c assumes that's what we'll do. */
5462 if (code
!= ARRAY_REF
5463 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
5464 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
5468 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
5469 elt
= TREE_CHAIN (elt
))
5470 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
5471 return expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
5475 enum machine_mode mode1
;
5481 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
5482 &mode1
, &unsignedp
, &volatilep
,
5485 /* If we got back the original object, something is wrong. Perhaps
5486 we are evaluating an expression too early. In any event, don't
5487 infinitely recurse. */
5491 /* If TEM's type is a union of variable size, pass TARGET to the inner
5492 computation, since it will need a temporary and TARGET is known
5493 to have to do. This occurs in unchecked conversion in Ada. */
5495 op0
= expand_expr (tem
,
5496 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
5497 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
5499 ? target
: NULL_RTX
),
5501 modifier
== EXPAND_INITIALIZER
? modifier
: 0);
5503 /* If this is a constant, put it into a register if it is a
5504 legitimate constant and memory if it isn't. */
5505 if (CONSTANT_P (op0
))
5507 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
5508 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
))
5509 op0
= force_reg (mode
, op0
);
5511 op0
= validize_mem (force_const_mem (mode
, op0
));
5516 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
5518 if (GET_CODE (op0
) != MEM
)
5520 op0
= change_address (op0
, VOIDmode
,
5521 gen_rtx (PLUS
, ptr_mode
, XEXP (op0
, 0),
5522 force_reg (ptr_mode
, offset_rtx
)));
5525 /* Don't forget about volatility even if this is a bitfield. */
5526 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
5528 op0
= copy_rtx (op0
);
5529 MEM_VOLATILE_P (op0
) = 1;
5532 /* In cases where an aligned union has an unaligned object
5533 as a field, we might be extracting a BLKmode value from
5534 an integer-mode (e.g., SImode) object. Handle this case
5535 by doing the extract into an object as wide as the field
5536 (which we know to be the width of a basic mode), then
5537 storing into memory, and changing the mode to BLKmode.
5538 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5539 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5540 if (mode1
== VOIDmode
5541 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
5542 || (modifier
!= EXPAND_CONST_ADDRESS
5543 && modifier
!= EXPAND_INITIALIZER
5544 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
5545 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5546 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5547 /* If the field isn't aligned enough to fetch as a memref,
5548 fetch it as a bit field. */
5549 || (SLOW_UNALIGNED_ACCESS
5550 && ((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
))
5551 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))))))
5553 enum machine_mode ext_mode
= mode
;
5555 if (ext_mode
== BLKmode
)
5556 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
5558 if (ext_mode
== BLKmode
)
5560 /* In this case, BITPOS must start at a byte boundary and
5561 TARGET, if specified, must be a MEM. */
5562 if (GET_CODE (op0
) != MEM
5563 || (target
!= 0 && GET_CODE (target
) != MEM
)
5564 || bitpos
% BITS_PER_UNIT
!= 0)
5567 op0
= change_address (op0
, VOIDmode
,
5568 plus_constant (XEXP (op0
, 0),
5569 bitpos
/ BITS_PER_UNIT
));
5571 target
= assign_temp (type
, 0, 1, 1);
5573 emit_block_move (target
, op0
,
5574 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5581 op0
= validize_mem (op0
);
5583 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
5584 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5586 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
5587 unsignedp
, target
, ext_mode
, ext_mode
,
5589 int_size_in_bytes (TREE_TYPE (tem
)));
5591 /* If the result is a record type and BITSIZE is narrower than
5592 the mode of OP0, an integral mode, and this is a big endian
5593 machine, we must put the field into the high-order bits. */
5594 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
5595 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
5596 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
5597 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
5598 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
5602 if (mode
== BLKmode
)
5604 rtx
new = assign_stack_temp (ext_mode
,
5605 bitsize
/ BITS_PER_UNIT
, 0);
5607 emit_move_insn (new, op0
);
5608 op0
= copy_rtx (new);
5609 PUT_MODE (op0
, BLKmode
);
5610 MEM_IN_STRUCT_P (op0
) = 1;
5616 /* If the result is BLKmode, use that to access the object
5618 if (mode
== BLKmode
)
5621 /* Get a reference to just this component. */
5622 if (modifier
== EXPAND_CONST_ADDRESS
5623 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
5624 op0
= gen_rtx (MEM
, mode1
, plus_constant (XEXP (op0
, 0),
5625 (bitpos
/ BITS_PER_UNIT
)));
5627 op0
= change_address (op0
, mode1
,
5628 plus_constant (XEXP (op0
, 0),
5629 (bitpos
/ BITS_PER_UNIT
)));
5630 if (GET_CODE (XEXP (op0
, 0)) == REG
)
5631 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5633 MEM_IN_STRUCT_P (op0
) = 1;
5634 MEM_VOLATILE_P (op0
) |= volatilep
;
5635 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
)
5638 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5639 convert_move (target
, op0
, unsignedp
);
5643 /* Intended for a reference to a buffer of a file-object in Pascal.
5644 But it's not certain that a special tree code will really be
5645 necessary for these. INDIRECT_REF might work for them. */
5651 /* Pascal set IN expression.
5654 rlo = set_low - (set_low%bits_per_word);
5655 the_word = set [ (index - rlo)/bits_per_word ];
5656 bit_index = index % bits_per_word;
5657 bitmask = 1 << bit_index;
5658 return !!(the_word & bitmask); */
5660 tree set
= TREE_OPERAND (exp
, 0);
5661 tree index
= TREE_OPERAND (exp
, 1);
5662 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
5663 tree set_type
= TREE_TYPE (set
);
5664 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
5665 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
5666 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
5667 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
5668 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
5669 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
5670 rtx setaddr
= XEXP (setval
, 0);
5671 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
5673 rtx diff
, quo
, rem
, addr
, bit
, result
;
5675 preexpand_calls (exp
);
5677 /* If domain is empty, answer is no. Likewise if index is constant
5678 and out of bounds. */
5679 if ((TREE_CODE (set_high_bound
) == INTEGER_CST
5680 && TREE_CODE (set_low_bound
) == INTEGER_CST
5681 && tree_int_cst_lt (set_high_bound
, set_low_bound
)
5682 || (TREE_CODE (index
) == INTEGER_CST
5683 && TREE_CODE (set_low_bound
) == INTEGER_CST
5684 && tree_int_cst_lt (index
, set_low_bound
))
5685 || (TREE_CODE (set_high_bound
) == INTEGER_CST
5686 && TREE_CODE (index
) == INTEGER_CST
5687 && tree_int_cst_lt (set_high_bound
, index
))))
5691 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5693 /* If we get here, we have to generate the code for both cases
5694 (in range and out of range). */
5696 op0
= gen_label_rtx ();
5697 op1
= gen_label_rtx ();
5699 if (! (GET_CODE (index_val
) == CONST_INT
5700 && GET_CODE (lo_r
) == CONST_INT
))
5702 emit_cmp_insn (index_val
, lo_r
, LT
, NULL_RTX
,
5703 GET_MODE (index_val
), iunsignedp
, 0);
5704 emit_jump_insn (gen_blt (op1
));
5707 if (! (GET_CODE (index_val
) == CONST_INT
5708 && GET_CODE (hi_r
) == CONST_INT
))
5710 emit_cmp_insn (index_val
, hi_r
, GT
, NULL_RTX
,
5711 GET_MODE (index_val
), iunsignedp
, 0);
5712 emit_jump_insn (gen_bgt (op1
));
5715 /* Calculate the element number of bit zero in the first word
5717 if (GET_CODE (lo_r
) == CONST_INT
)
5718 rlow
= GEN_INT (INTVAL (lo_r
)
5719 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
5721 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
5722 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
5723 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5725 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
5726 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5728 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
5729 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5730 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
5731 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5733 addr
= memory_address (byte_mode
,
5734 expand_binop (index_mode
, add_optab
, diff
,
5735 setaddr
, NULL_RTX
, iunsignedp
,
5738 /* Extract the bit we want to examine */
5739 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
5740 gen_rtx (MEM
, byte_mode
, addr
),
5741 make_tree (TREE_TYPE (index
), rem
),
5743 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
5744 GET_MODE (target
) == byte_mode
? target
: 0,
5745 1, OPTAB_LIB_WIDEN
);
5747 if (result
!= target
)
5748 convert_move (target
, result
, 1);
5750 /* Output the code to handle the out-of-range case. */
5753 emit_move_insn (target
, const0_rtx
);
5758 case WITH_CLEANUP_EXPR
:
5759 if (RTL_EXPR_RTL (exp
) == 0)
5762 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5764 = tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2), cleanups_this_call
);
5765 /* That's it for this cleanup. */
5766 TREE_OPERAND (exp
, 2) = 0;
5767 expand_eh_region_start ();
5769 return RTL_EXPR_RTL (exp
);
5771 case CLEANUP_POINT_EXPR
:
5773 extern int temp_slot_level
;
5774 tree old_cleanups
= cleanups_this_call
;
5775 int old_temp_level
= target_temp_slot_level
;
5777 target_temp_slot_level
= temp_slot_level
;
5778 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5779 /* If we're going to use this value, load it up now. */
5781 op0
= force_not_mem (op0
);
5782 expand_cleanups_to (old_cleanups
);
5783 preserve_temp_slots (op0
);
5786 target_temp_slot_level
= old_temp_level
;
5791 /* Check for a built-in function. */
5792 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5793 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5795 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5796 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
5798 /* If this call was expanded already by preexpand_calls,
5799 just return the result we got. */
5800 if (CALL_EXPR_RTL (exp
) != 0)
5801 return CALL_EXPR_RTL (exp
);
5803 return expand_call (exp
, target
, ignore
);
5805 case NON_LVALUE_EXPR
:
5808 case REFERENCE_EXPR
:
5809 if (TREE_CODE (type
) == UNION_TYPE
)
5811 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
5814 if (mode
!= BLKmode
)
5815 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5817 target
= assign_temp (type
, 0, 1, 1);
5820 if (GET_CODE (target
) == MEM
)
5821 /* Store data into beginning of memory target. */
5822 store_expr (TREE_OPERAND (exp
, 0),
5823 change_address (target
, TYPE_MODE (valtype
), 0), 0);
5825 else if (GET_CODE (target
) == REG
)
5826 /* Store this field into a union of the proper type. */
5827 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
5828 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
5830 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5834 /* Return the entire union. */
5838 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5840 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
5843 /* If the signedness of the conversion differs and OP0 is
5844 a promoted SUBREG, clear that indication since we now
5845 have to do the proper extension. */
5846 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
5847 && GET_CODE (op0
) == SUBREG
)
5848 SUBREG_PROMOTED_VAR_P (op0
) = 0;
5853 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
5854 if (GET_MODE (op0
) == mode
)
5857 /* If OP0 is a constant, just convert it into the proper mode. */
5858 if (CONSTANT_P (op0
))
5860 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
5861 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5863 if (modifier
== EXPAND_INITIALIZER
)
5864 return gen_rtx (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
5868 convert_to_mode (mode
, op0
,
5869 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5871 convert_move (target
, op0
,
5872 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5876 /* We come here from MINUS_EXPR when the second operand is a
5879 this_optab
= add_optab
;
5881 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5882 something else, make sure we add the register to the constant and
5883 then to the other thing. This case can occur during strength
5884 reduction and doing it this way will produce better code if the
5885 frame pointer or argument pointer is eliminated.
5887 fold-const.c will ensure that the constant is always in the inner
5888 PLUS_EXPR, so the only case we need to do anything about is if
5889 sp, ap, or fp is our second argument, in which case we must swap
5890 the innermost first argument and our second argument. */
5892 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5893 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
5894 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
5895 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
5896 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
5897 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
5899 tree t
= TREE_OPERAND (exp
, 1);
5901 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5902 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
5905 /* If the result is to be ptr_mode and we are adding an integer to
5906 something, we might be forming a constant. So try to use
5907 plus_constant. If it produces a sum and we can't accept it,
5908 use force_operand. This allows P = &ARR[const] to generate
5909 efficient code on machines where a SYMBOL_REF is not a valid
5912 If this is an EXPAND_SUM call, always return the sum. */
5913 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
5914 || mode
== ptr_mode
)
5916 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
5917 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5918 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
5920 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
5922 op1
= plus_constant (op1
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)));
5923 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5924 op1
= force_operand (op1
, target
);
5928 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
5929 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
5930 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
5932 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
5934 if (! CONSTANT_P (op0
))
5936 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5937 VOIDmode
, modifier
);
5938 /* Don't go to both_summands if modifier
5939 says it's not right to return a PLUS. */
5940 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5944 op0
= plus_constant (op0
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)));
5945 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5946 op0
= force_operand (op0
, target
);
5951 /* No sense saving up arithmetic to be done
5952 if it's all in the wrong mode to form part of an address.
5953 And force_operand won't know whether to sign-extend or
5955 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5956 || mode
!= ptr_mode
)
5959 preexpand_calls (exp
);
5960 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5963 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
5964 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
5967 /* Make sure any term that's a sum with a constant comes last. */
5968 if (GET_CODE (op0
) == PLUS
5969 && CONSTANT_P (XEXP (op0
, 1)))
5975 /* If adding to a sum including a constant,
5976 associate it to put the constant outside. */
5977 if (GET_CODE (op1
) == PLUS
5978 && CONSTANT_P (XEXP (op1
, 1)))
5980 rtx constant_term
= const0_rtx
;
5982 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
5985 /* Ensure that MULT comes first if there is one. */
5986 else if (GET_CODE (op0
) == MULT
)
5987 op0
= gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
5989 op0
= gen_rtx (PLUS
, mode
, XEXP (op1
, 0), op0
);
5991 /* Let's also eliminate constants from op0 if possible. */
5992 op0
= eliminate_constant_term (op0
, &constant_term
);
5994 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5995 their sum should be a constant. Form it into OP1, since the
5996 result we want will then be OP0 + OP1. */
5998 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
6003 op1
= gen_rtx (PLUS
, mode
, constant_term
, XEXP (op1
, 1));
6006 /* Put a constant term last and put a multiplication first. */
6007 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
6008 temp
= op1
, op1
= op0
, op0
= temp
;
6010 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
6011 return temp
? temp
: gen_rtx (PLUS
, mode
, op0
, op1
);
6014 /* For initializers, we are allowed to return a MINUS of two
6015 symbolic constants. Here we handle all cases when both operands
6017 /* Handle difference of two symbolic constants,
6018 for the sake of an initializer. */
6019 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6020 && really_constant_p (TREE_OPERAND (exp
, 0))
6021 && really_constant_p (TREE_OPERAND (exp
, 1)))
6023 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
6024 VOIDmode
, modifier
);
6025 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6026 VOIDmode
, modifier
);
6028 /* If the last operand is a CONST_INT, use plus_constant of
6029 the negated constant. Else make the MINUS. */
6030 if (GET_CODE (op1
) == CONST_INT
)
6031 return plus_constant (op0
, - INTVAL (op1
));
6033 return gen_rtx (MINUS
, mode
, op0
, op1
);
6035 /* Convert A - const to A + (-const). */
6036 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6038 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
6039 TREE_OPERAND (exp
, 1)));
6041 /* Deal with the case where we can't negate the constant
6043 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
6045 tree newtype
= signed_type (type
);
6046 tree newop0
= convert (newtype
, TREE_OPERAND (exp
, 0));
6047 tree newop1
= convert (newtype
, TREE_OPERAND (exp
, 1));
6048 tree newneg
= fold (build1 (NEGATE_EXPR
, newtype
, newop1
));
6050 if (! TREE_OVERFLOW (newneg
))
6051 return expand_expr (convert (type
,
6052 build (PLUS_EXPR
, newtype
,
6054 target
, tmode
, modifier
);
6058 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
6062 this_optab
= sub_optab
;
6066 preexpand_calls (exp
);
6067 /* If first operand is constant, swap them.
6068 Thus the following special case checks need only
6069 check the second operand. */
6070 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
6072 register tree t1
= TREE_OPERAND (exp
, 0);
6073 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
6074 TREE_OPERAND (exp
, 1) = t1
;
6077 /* Attempt to return something suitable for generating an
6078 indexed address, for machines that support that. */
6080 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
6081 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6082 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
6084 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, EXPAND_SUM
);
6086 /* Apply distributive law if OP0 is x+c. */
6087 if (GET_CODE (op0
) == PLUS
6088 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
6089 return gen_rtx (PLUS
, mode
,
6090 gen_rtx (MULT
, mode
, XEXP (op0
, 0),
6091 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
6092 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
6093 * INTVAL (XEXP (op0
, 1))));
6095 if (GET_CODE (op0
) != REG
)
6096 op0
= force_operand (op0
, NULL_RTX
);
6097 if (GET_CODE (op0
) != REG
)
6098 op0
= copy_to_mode_reg (mode
, op0
);
6100 return gen_rtx (MULT
, mode
, op0
,
6101 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
6104 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6107 /* Check for multiplying things that have been extended
6108 from a narrower type. If this machine supports multiplying
6109 in that narrower type with a result in the desired type,
6110 do it that way, and avoid the explicit type-conversion. */
6111 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
6112 && TREE_CODE (type
) == INTEGER_TYPE
6113 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6114 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6115 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6116 && int_fits_type_p (TREE_OPERAND (exp
, 1),
6117 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6118 /* Don't use a widening multiply if a shift will do. */
6119 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
6120 > HOST_BITS_PER_WIDE_INT
)
6121 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
6123 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
6124 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6126 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
6127 /* If both operands are extended, they must either both
6128 be zero-extended or both be sign-extended. */
6129 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6131 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
6133 enum machine_mode innermode
6134 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
6135 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6136 ? smul_widen_optab
: umul_widen_optab
);
6137 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6138 ? umul_widen_optab
: smul_widen_optab
);
6139 if (mode
== GET_MODE_WIDER_MODE (innermode
))
6141 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
6143 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6144 NULL_RTX
, VOIDmode
, 0);
6145 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6146 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6149 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6150 NULL_RTX
, VOIDmode
, 0);
6153 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
6154 && innermode
== word_mode
)
6157 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6158 NULL_RTX
, VOIDmode
, 0);
6159 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6160 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6163 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6164 NULL_RTX
, VOIDmode
, 0);
6165 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
6166 unsignedp
, OPTAB_LIB_WIDEN
);
6167 htem
= expand_mult_highpart_adjust (innermode
,
6168 gen_highpart (innermode
, temp
),
6170 gen_highpart (innermode
, temp
),
6172 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
6177 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6178 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6179 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
6181 case TRUNC_DIV_EXPR
:
6182 case FLOOR_DIV_EXPR
:
6184 case ROUND_DIV_EXPR
:
6185 case EXACT_DIV_EXPR
:
6186 preexpand_calls (exp
);
6187 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6189 /* Possible optimization: compute the dividend with EXPAND_SUM
6190 then if the divisor is constant can optimize the case
6191 where some terms of the dividend have coeffs divisible by it. */
6192 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6193 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6194 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
6197 this_optab
= flodiv_optab
;
6200 case TRUNC_MOD_EXPR
:
6201 case FLOOR_MOD_EXPR
:
6203 case ROUND_MOD_EXPR
:
6204 preexpand_calls (exp
);
6205 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6207 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6208 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6209 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
6211 case FIX_ROUND_EXPR
:
6212 case FIX_FLOOR_EXPR
:
6214 abort (); /* Not used for C. */
6216 case FIX_TRUNC_EXPR
:
6217 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6219 target
= gen_reg_rtx (mode
);
6220 expand_fix (target
, op0
, unsignedp
);
6224 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6226 target
= gen_reg_rtx (mode
);
6227 /* expand_float can't figure out what to do if FROM has VOIDmode.
6228 So give it the correct mode. With -O, cse will optimize this. */
6229 if (GET_MODE (op0
) == VOIDmode
)
6230 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6232 expand_float (target
, op0
,
6233 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
6237 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6238 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
6244 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6246 /* Handle complex values specially. */
6247 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
6248 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
6249 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
6251 /* Unsigned abs is simply the operand. Testing here means we don't
6252 risk generating incorrect code below. */
6253 if (TREE_UNSIGNED (type
))
6256 return expand_abs (mode
, op0
, target
, unsignedp
,
6257 safe_from_p (target
, TREE_OPERAND (exp
, 0)));
6261 target
= original_target
;
6262 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1))
6263 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
6264 || GET_MODE (target
) != mode
6265 || (GET_CODE (target
) == REG
6266 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
6267 target
= gen_reg_rtx (mode
);
6268 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6269 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6271 /* First try to do it with a special MIN or MAX instruction.
6272 If that does not win, use a conditional jump to select the proper
6274 this_optab
= (TREE_UNSIGNED (type
)
6275 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
6276 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
6278 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
6283 /* At this point, a MEM target is no longer useful; we will get better
6286 if (GET_CODE (target
) == MEM
)
6287 target
= gen_reg_rtx (mode
);
6290 emit_move_insn (target
, op0
);
6292 op0
= gen_label_rtx ();
6294 /* If this mode is an integer too wide to compare properly,
6295 compare word by word. Rely on cse to optimize constant cases. */
6296 if (GET_MODE_CLASS (mode
) == MODE_INT
&& !can_compare_p (mode
))
6298 if (code
== MAX_EXPR
)
6299 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6300 target
, op1
, NULL_RTX
, op0
);
6302 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6303 op1
, target
, NULL_RTX
, op0
);
6304 emit_move_insn (target
, op1
);
6308 if (code
== MAX_EXPR
)
6309 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6310 ? compare_from_rtx (target
, op1
, GEU
, 1, mode
, NULL_RTX
, 0)
6311 : compare_from_rtx (target
, op1
, GE
, 0, mode
, NULL_RTX
, 0));
6313 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6314 ? compare_from_rtx (target
, op1
, LEU
, 1, mode
, NULL_RTX
, 0)
6315 : compare_from_rtx (target
, op1
, LE
, 0, mode
, NULL_RTX
, 0));
6316 if (temp
== const0_rtx
)
6317 emit_move_insn (target
, op1
);
6318 else if (temp
!= const_true_rtx
)
6320 if (bcc_gen_fctn
[(int) GET_CODE (temp
)] != 0)
6321 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (temp
)]) (op0
));
6324 emit_move_insn (target
, op1
);
6331 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6332 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
6338 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6339 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
6344 /* ??? Can optimize bitwise operations with one arg constant.
6345 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6346 and (a bitwise1 b) bitwise2 b (etc)
6347 but that is probably not worth while. */
6349 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6350 boolean values when we want in all cases to compute both of them. In
6351 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6352 as actual zero-or-1 values and then bitwise anding. In cases where
6353 there cannot be any side effects, better code would be made by
6354 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6355 how to recognize those cases. */
6357 case TRUTH_AND_EXPR
:
6359 this_optab
= and_optab
;
6364 this_optab
= ior_optab
;
6367 case TRUTH_XOR_EXPR
:
6369 this_optab
= xor_optab
;
6376 preexpand_calls (exp
);
6377 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6379 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6380 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
6383 /* Could determine the answer when only additive constants differ. Also,
6384 the addition of one can be handled by changing the condition. */
6391 preexpand_calls (exp
);
6392 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
6396 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6397 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
6399 && GET_CODE (original_target
) == REG
6400 && (GET_MODE (original_target
)
6401 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6403 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6406 if (temp
!= original_target
)
6407 temp
= copy_to_reg (temp
);
6409 op1
= gen_label_rtx ();
6410 emit_cmp_insn (temp
, const0_rtx
, EQ
, NULL_RTX
,
6411 GET_MODE (temp
), unsignedp
, 0);
6412 emit_jump_insn (gen_beq (op1
));
6413 emit_move_insn (temp
, const1_rtx
);
6418 /* If no set-flag instruction, must generate a conditional
6419 store into a temporary variable. Drop through
6420 and handle this like && and ||. */
6422 case TRUTH_ANDIF_EXPR
:
6423 case TRUTH_ORIF_EXPR
:
6425 && (target
== 0 || ! safe_from_p (target
, exp
)
6426 /* Make sure we don't have a hard reg (such as function's return
6427 value) live across basic blocks, if not optimizing. */
6428 || (!optimize
&& GET_CODE (target
) == REG
6429 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
6430 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6433 emit_clr_insn (target
);
6435 op1
= gen_label_rtx ();
6436 jumpifnot (exp
, op1
);
6439 emit_0_to_1_insn (target
);
6442 return ignore
? const0_rtx
: target
;
6444 case TRUTH_NOT_EXPR
:
6445 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6446 /* The parser is careful to generate TRUTH_NOT_EXPR
6447 only with operands that are always zero or one. */
6448 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
6449 target
, 1, OPTAB_LIB_WIDEN
);
6455 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6457 return expand_expr (TREE_OPERAND (exp
, 1),
6458 (ignore
? const0_rtx
: target
),
6462 /* If we would have a "singleton" (see below) were it not for a
6463 conversion in each arm, bring that conversion back out. */
6464 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
6465 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
6466 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
6467 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
6469 tree
true = TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
6470 tree
false = TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
6472 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6473 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6474 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6475 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6476 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6477 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6478 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6479 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6480 return expand_expr (build1 (NOP_EXPR
, type
,
6481 build (COND_EXPR
, TREE_TYPE (true),
6482 TREE_OPERAND (exp
, 0),
6484 target
, tmode
, modifier
);
6488 rtx flag
= NULL_RTX
;
6489 tree left_cleanups
= NULL_TREE
;
6490 tree right_cleanups
= NULL_TREE
;
6492 /* Used to save a pointer to the place to put the setting of
6493 the flag that indicates if this side of the conditional was
6494 taken. We backpatch the code, if we find out later that we
6495 have any conditional cleanups that need to be performed. */
6496 rtx dest_right_flag
= NULL_RTX
;
6497 rtx dest_left_flag
= NULL_RTX
;
6499 /* Note that COND_EXPRs whose type is a structure or union
6500 are required to be constructed to contain assignments of
6501 a temporary variable, so that we can evaluate them here
6502 for side effect only. If type is void, we must do likewise. */
6504 /* If an arm of the branch requires a cleanup,
6505 only that cleanup is performed. */
6508 tree binary_op
= 0, unary_op
= 0;
6509 tree old_cleanups
= cleanups_this_call
;
6511 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6512 convert it to our mode, if necessary. */
6513 if (integer_onep (TREE_OPERAND (exp
, 1))
6514 && integer_zerop (TREE_OPERAND (exp
, 2))
6515 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6519 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6524 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
6525 if (GET_MODE (op0
) == mode
)
6529 target
= gen_reg_rtx (mode
);
6530 convert_move (target
, op0
, unsignedp
);
6534 /* Check for X ? A + B : A. If we have this, we can copy A to the
6535 output and conditionally add B. Similarly for unary operations.
6536 Don't do this if X has side-effects because those side effects
6537 might affect A or B and the "?" operation is a sequence point in
6538 ANSI. (operand_equal_p tests for side effects.) */
6540 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
6541 && operand_equal_p (TREE_OPERAND (exp
, 2),
6542 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6543 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
6544 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
6545 && operand_equal_p (TREE_OPERAND (exp
, 1),
6546 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6547 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
6548 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
6549 && operand_equal_p (TREE_OPERAND (exp
, 2),
6550 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6551 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
6552 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
6553 && operand_equal_p (TREE_OPERAND (exp
, 1),
6554 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6555 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
6557 /* If we are not to produce a result, we have no target. Otherwise,
6558 if a target was specified use it; it will not be used as an
6559 intermediate target unless it is safe. If no target, use a
6564 else if (original_target
6565 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0))
6566 || (singleton
&& GET_CODE (original_target
) == REG
6567 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
6568 && original_target
== var_rtx (singleton
)))
6569 && GET_MODE (original_target
) == mode
6570 && ! (GET_CODE (original_target
) == MEM
6571 && MEM_VOLATILE_P (original_target
)))
6572 temp
= original_target
;
6573 else if (TREE_ADDRESSABLE (type
))
6576 temp
= assign_temp (type
, 0, 0, 1);
6578 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6579 do the test of X as a store-flag operation, do this as
6580 A + ((X != 0) << log C). Similarly for other simple binary
6581 operators. Only do for C == 1 if BRANCH_COST is low. */
6582 if (temp
&& singleton
&& binary_op
6583 && (TREE_CODE (binary_op
) == PLUS_EXPR
6584 || TREE_CODE (binary_op
) == MINUS_EXPR
6585 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
6586 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
6587 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
6588 : integer_onep (TREE_OPERAND (binary_op
, 1)))
6589 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6592 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
6593 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
6594 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
6597 /* If we had X ? A : A + 1, do this as A + (X == 0).
6599 We have to invert the truth value here and then put it
6600 back later if do_store_flag fails. We cannot simply copy
6601 TREE_OPERAND (exp, 0) to another variable and modify that
6602 because invert_truthvalue can modify the tree pointed to
6604 if (singleton
== TREE_OPERAND (exp
, 1))
6605 TREE_OPERAND (exp
, 0)
6606 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6608 result
= do_store_flag (TREE_OPERAND (exp
, 0),
6609 (safe_from_p (temp
, singleton
)
6611 mode
, BRANCH_COST
<= 1);
6613 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
6614 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
6615 build_int_2 (tree_log2
6619 (safe_from_p (temp
, singleton
)
6620 ? temp
: NULL_RTX
), 0);
6624 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
6625 return expand_binop (mode
, boptab
, op1
, result
, temp
,
6626 unsignedp
, OPTAB_LIB_WIDEN
);
6628 else if (singleton
== TREE_OPERAND (exp
, 1))
6629 TREE_OPERAND (exp
, 0)
6630 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6633 do_pending_stack_adjust ();
6635 op0
= gen_label_rtx ();
6637 flag
= gen_reg_rtx (word_mode
);
6638 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
6642 /* If the target conflicts with the other operand of the
6643 binary op, we can't use it. Also, we can't use the target
6644 if it is a hard register, because evaluating the condition
6645 might clobber it. */
6647 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1)))
6648 || (GET_CODE (temp
) == REG
6649 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
6650 temp
= gen_reg_rtx (mode
);
6651 store_expr (singleton
, temp
, 0);
6654 expand_expr (singleton
,
6655 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6656 dest_left_flag
= get_last_insn ();
6657 if (singleton
== TREE_OPERAND (exp
, 1))
6658 jumpif (TREE_OPERAND (exp
, 0), op0
);
6660 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6662 /* Allows cleanups up to here. */
6663 old_cleanups
= cleanups_this_call
;
6664 if (binary_op
&& temp
== 0)
6665 /* Just touch the other operand. */
6666 expand_expr (TREE_OPERAND (binary_op
, 1),
6667 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6669 store_expr (build (TREE_CODE (binary_op
), type
,
6670 make_tree (type
, temp
),
6671 TREE_OPERAND (binary_op
, 1)),
6674 store_expr (build1 (TREE_CODE (unary_op
), type
,
6675 make_tree (type
, temp
)),
6678 dest_right_flag
= get_last_insn ();
6681 /* This is now done in jump.c and is better done there because it
6682 produces shorter register lifetimes. */
6684 /* Check for both possibilities either constants or variables
6685 in registers (but not the same as the target!). If so, can
6686 save branches by assigning one, branching, and assigning the
6688 else if (temp
&& GET_MODE (temp
) != BLKmode
6689 && (TREE_CONSTANT (TREE_OPERAND (exp
, 1))
6690 || ((TREE_CODE (TREE_OPERAND (exp
, 1)) == PARM_DECL
6691 || TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
)
6692 && DECL_RTL (TREE_OPERAND (exp
, 1))
6693 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 1))) == REG
6694 && DECL_RTL (TREE_OPERAND (exp
, 1)) != temp
))
6695 && (TREE_CONSTANT (TREE_OPERAND (exp
, 2))
6696 || ((TREE_CODE (TREE_OPERAND (exp
, 2)) == PARM_DECL
6697 || TREE_CODE (TREE_OPERAND (exp
, 2)) == VAR_DECL
)
6698 && DECL_RTL (TREE_OPERAND (exp
, 2))
6699 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 2))) == REG
6700 && DECL_RTL (TREE_OPERAND (exp
, 2)) != temp
)))
6702 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6703 temp
= gen_reg_rtx (mode
);
6704 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6705 dest_left_flag
= get_last_insn ();
6706 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6708 /* Allows cleanups up to here. */
6709 old_cleanups
= cleanups_this_call
;
6710 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6712 dest_right_flag
= get_last_insn ();
6715 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6716 comparison operator. If we have one of these cases, set the
6717 output to A, branch on A (cse will merge these two references),
6718 then set the output to FOO. */
6720 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6721 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6722 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6723 TREE_OPERAND (exp
, 1), 0)
6724 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6725 && safe_from_p (temp
, TREE_OPERAND (exp
, 2)))
6727 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6728 temp
= gen_reg_rtx (mode
);
6729 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6730 dest_left_flag
= get_last_insn ();
6731 jumpif (TREE_OPERAND (exp
, 0), op0
);
6733 /* Allows cleanups up to here. */
6734 old_cleanups
= cleanups_this_call
;
6735 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6737 dest_right_flag
= get_last_insn ();
6740 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6741 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6742 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6743 TREE_OPERAND (exp
, 2), 0)
6744 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6745 && safe_from_p (temp
, TREE_OPERAND (exp
, 1)))
6747 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6748 temp
= gen_reg_rtx (mode
);
6749 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6750 dest_left_flag
= get_last_insn ();
6751 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6753 /* Allows cleanups up to here. */
6754 old_cleanups
= cleanups_this_call
;
6755 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6757 dest_right_flag
= get_last_insn ();
6761 op1
= gen_label_rtx ();
6762 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6764 /* Allows cleanups up to here. */
6765 old_cleanups
= cleanups_this_call
;
6767 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6769 expand_expr (TREE_OPERAND (exp
, 1),
6770 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6771 dest_left_flag
= get_last_insn ();
6773 /* Handle conditional cleanups, if any. */
6774 left_cleanups
= defer_cleanups_to (old_cleanups
);
6777 emit_jump_insn (gen_jump (op1
));
6781 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6783 expand_expr (TREE_OPERAND (exp
, 2),
6784 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6785 dest_right_flag
= get_last_insn ();
6788 /* Handle conditional cleanups, if any. */
6789 right_cleanups
= defer_cleanups_to (old_cleanups
);
6795 /* Add back in, any conditional cleanups. */
6796 if (left_cleanups
|| right_cleanups
)
6802 /* Now that we know that a flag is needed, go back and add in the
6803 setting of the flag. */
6805 /* Do the left side flag. */
6806 last
= get_last_insn ();
6807 /* Flag left cleanups as needed. */
6808 emit_move_insn (flag
, const1_rtx
);
6809 /* ??? deprecated, use sequences instead. */
6810 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_left_flag
);
6812 /* Do the right side flag. */
6813 last
= get_last_insn ();
6814 /* Flag left cleanups as needed. */
6815 emit_move_insn (flag
, const0_rtx
);
6816 /* ??? deprecated, use sequences instead. */
6817 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_right_flag
);
6819 /* All cleanups must be on the function_obstack. */
6820 push_obstacks_nochange ();
6821 resume_temporary_allocation ();
6823 /* convert flag, which is an rtx, into a tree. */
6824 cond
= make_node (RTL_EXPR
);
6825 TREE_TYPE (cond
) = integer_type_node
;
6826 RTL_EXPR_RTL (cond
) = flag
;
6827 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
6828 cond
= save_expr (cond
);
6830 if (! left_cleanups
)
6831 left_cleanups
= integer_zero_node
;
6832 if (! right_cleanups
)
6833 right_cleanups
= integer_zero_node
;
6834 new_cleanups
= build (COND_EXPR
, void_type_node
,
6835 truthvalue_conversion (cond
),
6836 left_cleanups
, right_cleanups
);
6837 new_cleanups
= fold (new_cleanups
);
6841 /* Now add in the conditionalized cleanups. */
6843 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
6844 expand_eh_region_start ();
6851 /* Something needs to be initialized, but we didn't know
6852 where that thing was when building the tree. For example,
6853 it could be the return value of a function, or a parameter
6854 to a function which lays down in the stack, or a temporary
6855 variable which must be passed by reference.
6857 We guarantee that the expression will either be constructed
6858 or copied into our original target. */
6860 tree slot
= TREE_OPERAND (exp
, 0);
6861 tree cleanups
= NULL_TREE
;
6865 if (TREE_CODE (slot
) != VAR_DECL
)
6869 target
= original_target
;
6873 if (DECL_RTL (slot
) != 0)
6875 target
= DECL_RTL (slot
);
6876 /* If we have already expanded the slot, so don't do
6878 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6883 target
= assign_temp (type
, 2, 1, 1);
6884 /* All temp slots at this level must not conflict. */
6885 preserve_temp_slots (target
);
6886 DECL_RTL (slot
) = target
;
6888 /* Since SLOT is not known to the called function
6889 to belong to its stack frame, we must build an explicit
6890 cleanup. This case occurs when we must build up a reference
6891 to pass the reference as an argument. In this case,
6892 it is very likely that such a reference need not be
6895 if (TREE_OPERAND (exp
, 2) == 0)
6896 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
6897 cleanups
= TREE_OPERAND (exp
, 2);
6902 /* This case does occur, when expanding a parameter which
6903 needs to be constructed on the stack. The target
6904 is the actual stack address that we want to initialize.
6905 The function we call will perform the cleanup in this case. */
6907 /* If we have already assigned it space, use that space,
6908 not target that we were passed in, as our target
6909 parameter is only a hint. */
6910 if (DECL_RTL (slot
) != 0)
6912 target
= DECL_RTL (slot
);
6913 /* If we have already expanded the slot, so don't do
6915 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6919 DECL_RTL (slot
) = target
;
6922 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
6923 /* Mark it as expanded. */
6924 TREE_OPERAND (exp
, 1) = NULL_TREE
;
6926 store_expr (exp1
, target
, 0);
6930 cleanups_this_call
= tree_cons (NULL_TREE
,
6932 cleanups_this_call
);
6933 expand_eh_region_start ();
6941 tree lhs
= TREE_OPERAND (exp
, 0);
6942 tree rhs
= TREE_OPERAND (exp
, 1);
6943 tree noncopied_parts
= 0;
6944 tree lhs_type
= TREE_TYPE (lhs
);
6946 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
6947 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
6948 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
6949 TYPE_NONCOPIED_PARTS (lhs_type
));
6950 while (noncopied_parts
!= 0)
6952 expand_assignment (TREE_VALUE (noncopied_parts
),
6953 TREE_PURPOSE (noncopied_parts
), 0, 0);
6954 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
6961 /* If lhs is complex, expand calls in rhs before computing it.
6962 That's so we don't compute a pointer and save it over a call.
6963 If lhs is simple, compute it first so we can give it as a
6964 target if the rhs is just a call. This avoids an extra temp and copy
6965 and that prevents a partial-subsumption which makes bad code.
6966 Actually we could treat component_ref's of vars like vars. */
6968 tree lhs
= TREE_OPERAND (exp
, 0);
6969 tree rhs
= TREE_OPERAND (exp
, 1);
6970 tree noncopied_parts
= 0;
6971 tree lhs_type
= TREE_TYPE (lhs
);
6975 if (TREE_CODE (lhs
) != VAR_DECL
6976 && TREE_CODE (lhs
) != RESULT_DECL
6977 && TREE_CODE (lhs
) != PARM_DECL
)
6978 preexpand_calls (exp
);
6980 /* Check for |= or &= of a bitfield of size one into another bitfield
6981 of size 1. In this case, (unless we need the result of the
6982 assignment) we can do this more efficiently with a
6983 test followed by an assignment, if necessary.
6985 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6986 things change so we do, this code should be enhanced to
6989 && TREE_CODE (lhs
) == COMPONENT_REF
6990 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
6991 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
6992 && TREE_OPERAND (rhs
, 0) == lhs
6993 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
6994 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
6995 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
6997 rtx label
= gen_label_rtx ();
6999 do_jump (TREE_OPERAND (rhs
, 1),
7000 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
7001 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
7002 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
7003 (TREE_CODE (rhs
) == BIT_IOR_EXPR
7005 : integer_zero_node
)),
7007 do_pending_stack_adjust ();
7012 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
7013 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
7014 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
7015 TYPE_NONCOPIED_PARTS (lhs_type
));
7017 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
7018 while (noncopied_parts
!= 0)
7020 expand_assignment (TREE_PURPOSE (noncopied_parts
),
7021 TREE_VALUE (noncopied_parts
), 0, 0);
7022 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
7027 case PREINCREMENT_EXPR
:
7028 case PREDECREMENT_EXPR
:
7029 return expand_increment (exp
, 0, ignore
);
7031 case POSTINCREMENT_EXPR
:
7032 case POSTDECREMENT_EXPR
:
7033 /* Faster to treat as pre-increment if result is not used. */
7034 return expand_increment (exp
, ! ignore
, ignore
);
7037 /* If nonzero, TEMP will be set to the address of something that might
7038 be a MEM corresponding to a stack slot. */
7041 /* Are we taking the address of a nested function? */
7042 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
7043 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
7044 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0)))
7046 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
7047 op0
= force_operand (op0
, target
);
7049 /* If we are taking the address of something erroneous, just
7051 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
7055 /* We make sure to pass const0_rtx down if we came in with
7056 ignore set, to avoid doing the cleanups twice for something. */
7057 op0
= expand_expr (TREE_OPERAND (exp
, 0),
7058 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
7059 (modifier
== EXPAND_INITIALIZER
7060 ? modifier
: EXPAND_CONST_ADDRESS
));
7062 /* If we are going to ignore the result, OP0 will have been set
7063 to const0_rtx, so just return it. Don't get confused and
7064 think we are taking the address of the constant. */
7068 op0
= protect_from_queue (op0
, 0);
7070 /* We would like the object in memory. If it is a constant,
7071 we can have it be statically allocated into memory. For
7072 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7073 memory and store the value into it. */
7075 if (CONSTANT_P (op0
))
7076 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7078 else if (GET_CODE (op0
) == MEM
)
7080 mark_temp_addr_taken (op0
);
7081 temp
= XEXP (op0
, 0);
7084 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7085 || GET_CODE (op0
) == CONCAT
)
7087 /* If this object is in a register, it must be not
7089 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7090 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
7092 mark_temp_addr_taken (memloc
);
7093 emit_move_insn (memloc
, op0
);
7097 if (GET_CODE (op0
) != MEM
)
7100 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7102 temp
= XEXP (op0
, 0);
7103 #ifdef POINTERS_EXTEND_UNSIGNED
7104 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
7105 && mode
== ptr_mode
)
7106 temp
= convert_memory_address (ptr_mode
, temp
);
7111 op0
= force_operand (XEXP (op0
, 0), target
);
7114 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
7115 op0
= force_reg (Pmode
, op0
);
7117 if (GET_CODE (op0
) == REG
7118 && ! REG_USERVAR_P (op0
))
7119 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
7121 /* If we might have had a temp slot, add an equivalent address
7124 update_temp_slot_address (temp
, op0
);
7126 #ifdef POINTERS_EXTEND_UNSIGNED
7127 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
7128 && mode
== ptr_mode
)
7129 op0
= convert_memory_address (ptr_mode
, op0
);
7134 case ENTRY_VALUE_EXPR
:
7137 /* COMPLEX type for Extended Pascal & Fortran */
7140 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7143 /* Get the rtx code of the operands. */
7144 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7145 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
7148 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7152 /* Move the real (op0) and imaginary (op1) parts to their location. */
7153 emit_move_insn (gen_realpart (mode
, target
), op0
);
7154 emit_move_insn (gen_imagpart (mode
, target
), op1
);
7156 insns
= get_insns ();
7159 /* Complex construction should appear as a single unit. */
7160 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7161 each with a separate pseudo as destination.
7162 It's not correct for flow to treat them as a unit. */
7163 if (GET_CODE (target
) != CONCAT
)
7164 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
7172 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7173 return gen_realpart (mode
, op0
);
7176 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7177 return gen_imagpart (mode
, op0
);
7181 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7185 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7188 target
= gen_reg_rtx (mode
);
7192 /* Store the realpart and the negated imagpart to target. */
7193 emit_move_insn (gen_realpart (partmode
, target
),
7194 gen_realpart (partmode
, op0
));
7196 imag_t
= gen_imagpart (partmode
, target
);
7197 temp
= expand_unop (partmode
, neg_optab
,
7198 gen_imagpart (partmode
, op0
), imag_t
, 0);
7200 emit_move_insn (imag_t
, temp
);
7202 insns
= get_insns ();
7205 /* Conjugate should appear as a single unit
7206 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7207 each with a separate pseudo as destination.
7208 It's not correct for flow to treat them as a unit. */
7209 if (GET_CODE (target
) != CONCAT
)
7210 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
7218 op0
= CONST0_RTX (tmode
);
7224 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
7227 /* Here to do an ordinary binary operator, generating an instruction
7228 from the optab already placed in `this_optab'. */
7230 preexpand_calls (exp
);
7231 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
7233 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7234 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7236 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
7237 unsignedp
, OPTAB_LIB_WIDEN
);
/* NOTE(review): this region is a line-mangled extraction of the bytecode
   expander bc_expand_expr (walks tree EXP and emits stack bytecode).
   Gaps in the embedded original line numbers show that many source
   lines -- the function's declaration head, case labels, braces,
   breaks -- are missing from this view.  The text below is therefore
   preserved verbatim; only comments are added.  */
7244 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7247 bc_expand_expr (exp
)
/* Locals: CODE is the tree code being dispatched on; BINOPTAB/UNOPTAB/
   INCROPTAB select the bytecode operator tables; LAB/LAB1 are bytecode
   branch targets; OPCODE holds a raw bytecode for short-circuit ops.  */
7250 enum tree_code code
;
7253 struct binary_operator
*binoptab
;
7254 struct unary_operator
*unoptab
;
7255 struct increment_operator
*incroptab
;
7256 struct bc_label
*lab
, *lab1
;
7257 enum bytecode_opcode opcode
;
7260 code
= TREE_CODE (exp
);
/* Parameter case: a PARM_DECL with no DECL_RTL means its size depended
   on an earlier parameter; diagnose, else push its address and load
   the value.  (The enclosing case label is not visible here.)  */
7266 if (DECL_RTL (exp
) == 0)
7268 error_with_decl (exp
, "prior parameter's size depends on `%s'");
7272 bc_load_parmaddr (DECL_RTL (exp
));
7273 bc_load_memory (TREE_TYPE (exp
), exp
);
/* Variable/function-decl case: choose external vs. local addressing
   based on the DECL_RTL label kind and TREE_PUBLIC, then load the
   value.  */
7279 if (DECL_RTL (exp
) == 0)
7283 if (BYTECODE_LABEL (DECL_RTL (exp
)))
7284 bc_load_externaddr (DECL_RTL (exp
));
7286 bc_load_localaddr (DECL_RTL (exp
));
7288 if (TREE_PUBLIC (exp
))
7289 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
7290 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
7292 bc_load_localaddr (DECL_RTL (exp
));
7294 bc_load_memory (TREE_TYPE (exp
), exp
);
/* INTEGER_CST case: emit a push-constant instruction keyed by the
   constant's machine mode.  */
7299 #ifdef DEBUG_PRINT_CODE
7300 fprintf (stderr
, " [%x]\n", TREE_INT_CST_LOW (exp
));
7302 bc_emit_instruction (mode_to_const_map
[(int) (DECL_BIT_FIELD (exp
)
7304 : TYPE_MODE (TREE_TYPE (exp
)))],
7305 (HOST_WIDE_INT
) TREE_INT_CST_LOW (exp
));
/* REAL_CST case: same idea for floating constants (known-lossy; see
   the original FIXME below).  */
7311 #ifdef DEBUG_PRINT_CODE
7312 fprintf (stderr
, " [%g]\n", (double) TREE_INT_CST_LOW (exp
));
7314 /* FIX THIS: find a better way to pass real_cst's. -bson */
7315 bc_emit_instruction (mode_to_const_map
[TYPE_MODE (TREE_TYPE (exp
))],
7316 (double) TREE_REAL_CST (exp
));
/* CALL_EXPR case.  */
7325 /* We build a call description vector describing the type of
7326 the return value and of the arguments; this call vector,
7327 together with a pointer to a location for the return value
7328 and the base of the argument list, is passed to the low
7329 level machine dependent call subroutine, which is responsible
7330 for putting the arguments wherever real functions expect
7331 them, as well as getting the return value back. */
7333 tree calldesc
= 0, arg
;
7337 /* Push the evaluated args on the evaluation stack in reverse
7338 order. Also make an entry for each arg in the calldesc
7339 vector while we're at it. */
7341 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7343 for (arg
= TREE_OPERAND (exp
, 1); arg
; arg
= TREE_CHAIN (arg
))
7346 bc_expand_expr (TREE_VALUE (arg
));
7348 calldesc
= tree_cons ((tree
) 0,
7349 size_in_bytes (TREE_TYPE (TREE_VALUE (arg
))),
7351 calldesc
= tree_cons ((tree
) 0,
7352 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg
))),
/* Restore the argument list to its original order (it was destructively
   reversed above with nreverse).  */
7356 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7358 /* Allocate a location for the return value and push its
7359 address on the evaluation stack. Also make an entry
7360 at the front of the calldesc for the return value type. */
7362 type
= TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7363 retval
= bc_allocate_local (int_size_in_bytes (type
), TYPE_ALIGN (type
));
7364 bc_load_localaddr (retval
);
7366 calldesc
= tree_cons ((tree
) 0, size_in_bytes (type
), calldesc
);
7367 calldesc
= tree_cons ((tree
) 0, bc_runtime_type_code (type
), calldesc
);
7369 /* Prepend the argument count. */
7370 calldesc
= tree_cons ((tree
) 0,
7371 build_int_2 (nargs
, 0),
7374 /* Push the address of the call description vector on the stack. */
7375 calldesc
= build_nt (CONSTRUCTOR
, (tree
) 0, calldesc
);
7376 TREE_TYPE (calldesc
) = build_array_type (integer_type_node
,
7377 build_index_type (build_int_2 (nargs
* 2, 0)));
7378 r
= output_constant_def (calldesc
);
7379 bc_load_externaddr (r
);
7381 /* Push the address of the function to be called. */
7382 bc_expand_expr (TREE_OPERAND (exp
, 0));
7384 /* Call the function, popping its address and the calldesc vector
7385 address off the evaluation stack in the process. */
7386 bc_emit_instruction (call
);
7388 /* Pop the arguments off the stack. */
7389 bc_adjust_stack (nargs
);
7391 /* Load the return value onto the stack. */
7392 bc_load_localaddr (retval
);
7393 bc_load_memory (type
, TREE_OPERAND (exp
, 0));
/* SAVE_EXPR case: first evaluation stores the value into a dedicated
   local; later references reload the saved copy.  */
7399 if (!SAVE_EXPR_RTL (exp
))
7401 /* First time around: copy to local variable */
7402 SAVE_EXPR_RTL (exp
) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp
)),
7403 TYPE_ALIGN (TREE_TYPE(exp
)));
7404 bc_expand_expr (TREE_OPERAND (exp
, 0));
7405 bc_emit_instruction (duplicate
);
7407 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7408 bc_store_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7412 /* Consecutive reference: use saved copy */
7413 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7414 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7419 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7420 how are they handled instead? */
7423 TREE_USED (exp
) = 1;
7424 bc_expand_expr (STMT_BODY (exp
));
/* Conversion case: expand the operand, then emit a type conversion
   from the operand's type to EXP's type.  */
7431 bc_expand_expr (TREE_OPERAND (exp
, 0));
7432 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)), TREE_TYPE (exp
));
/* MODIFY_EXPR-style case: delegate to expand_assignment.  */
7437 expand_assignment (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1), 0, 0);
/* ADDR_EXPR case: push the operand's address.  */
7442 bc_expand_address (TREE_OPERAND (exp
, 0));
/* INDIRECT_REF case: evaluate the pointer, then load through it.  */
7447 bc_expand_expr (TREE_OPERAND (exp
, 0));
7448 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
/* ARRAY_REF case: canonicalize to pointer arithmetic and recurse.  */
7453 bc_expand_expr (bc_canonicalize_array_ref (exp
));
/* COMPONENT_REF case: compute the member address, then load (handles
   bitfields via the field decl in operand 1).  */
7458 bc_expand_component_address (exp
);
7460 /* If we have a bitfield, generate a proper load */
7461 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp
, 1)), TREE_OPERAND (exp
, 1));
/* COMPOUND_EXPR case: evaluate operand 0 for side effects, discard
   its value, then evaluate operand 1 as the result.  */
7466 bc_expand_expr (TREE_OPERAND (exp
, 0));
7467 bc_emit_instruction (drop
);
7468 bc_expand_expr (TREE_OPERAND (exp
, 1));
/* COND_EXPR case: test operand 0, branch over operand 1 to operand 2
   using two bytecode labels.  */
7473 bc_expand_expr (TREE_OPERAND (exp
, 0));
7474 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7475 lab
= bc_get_bytecode_label ();
7476 bc_emit_bytecode (xjumpifnot
);
7477 bc_emit_bytecode_labelref (lab
);
7479 #ifdef DEBUG_PRINT_CODE
7480 fputc ('\n', stderr
);
7482 bc_expand_expr (TREE_OPERAND (exp
, 1));
7483 lab1
= bc_get_bytecode_label ();
7484 bc_emit_bytecode (jump
);
7485 bc_emit_bytecode_labelref (lab1
);
7487 #ifdef DEBUG_PRINT_CODE
7488 fputc ('\n', stderr
);
7491 bc_emit_bytecode_labeldef (lab
);
7492 bc_expand_expr (TREE_OPERAND (exp
, 2));
7493 bc_emit_bytecode_labeldef (lab1
);
/* Operator dispatch: the cases below only select the appropriate
   operator-table entry (binoptab/unoptab/incroptab) or raw opcode;
   the fall-through jumps to the shared emission code further down.
   Many of the case labels themselves are missing from this view.  */
7496 case TRUTH_ANDIF_EXPR
:
7498 opcode
= xjumpifnot
;
7501 case TRUTH_ORIF_EXPR
:
7508 binoptab
= optab_plus_expr
;
7513 binoptab
= optab_minus_expr
;
7518 binoptab
= optab_mult_expr
;
7521 case TRUNC_DIV_EXPR
:
7522 case FLOOR_DIV_EXPR
:
7524 case ROUND_DIV_EXPR
:
7525 case EXACT_DIV_EXPR
:
7527 binoptab
= optab_trunc_div_expr
;
7530 case TRUNC_MOD_EXPR
:
7531 case FLOOR_MOD_EXPR
:
7533 case ROUND_MOD_EXPR
:
7535 binoptab
= optab_trunc_mod_expr
;
7538 case FIX_ROUND_EXPR
:
7539 case FIX_FLOOR_EXPR
:
7541 abort (); /* Not used for C. */
7543 case FIX_TRUNC_EXPR
:
7550 abort (); /* FIXME */
7554 binoptab
= optab_rdiv_expr
;
7559 binoptab
= optab_bit_and_expr
;
7564 binoptab
= optab_bit_ior_expr
;
7569 binoptab
= optab_bit_xor_expr
;
7574 binoptab
= optab_lshift_expr
;
7579 binoptab
= optab_rshift_expr
;
7582 case TRUTH_AND_EXPR
:
7584 binoptab
= optab_truth_and_expr
;
7589 binoptab
= optab_truth_or_expr
;
7594 binoptab
= optab_lt_expr
;
7599 binoptab
= optab_le_expr
;
7604 binoptab
= optab_ge_expr
;
7609 binoptab
= optab_gt_expr
;
7614 binoptab
= optab_eq_expr
;
7619 binoptab
= optab_ne_expr
;
7624 unoptab
= optab_negate_expr
;
7629 unoptab
= optab_bit_not_expr
;
7632 case TRUTH_NOT_EXPR
:
7634 unoptab
= optab_truth_not_expr
;
7637 case PREDECREMENT_EXPR
:
7639 incroptab
= optab_predecrement_expr
;
7642 case PREINCREMENT_EXPR
:
7644 incroptab
= optab_preincrement_expr
;
7647 case POSTDECREMENT_EXPR
:
7649 incroptab
= optab_postdecrement_expr
;
7652 case POSTINCREMENT_EXPR
:
7654 incroptab
= optab_postincrement_expr
;
/* CONSTRUCTOR case.  */
7659 bc_expand_constructor (exp
);
/* BIND_EXPR-style case: open a binding contour, expand the local
   VAR declarations and their initializers, expand the body, close
   the contour.  */
7669 tree vars
= TREE_OPERAND (exp
, 0);
7670 int vars_need_expansion
= 0;
7672 /* Need to open a binding contour here because
7673 if there are any cleanups they most be contained here. */
7674 expand_start_bindings (0);
7676 /* Mark the corresponding BLOCK for output. */
7677 if (TREE_OPERAND (exp
, 2) != 0)
7678 TREE_USED (TREE_OPERAND (exp
, 2)) = 1;
7680 /* If VARS have not yet been expanded, expand them now. */
7683 if (DECL_RTL (vars
) == 0)
7685 vars_need_expansion
= 1;
7688 expand_decl_init (vars
);
7689 vars
= TREE_CHAIN (vars
);
7692 bc_expand_expr (TREE_OPERAND (exp
, 1));
7694 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
/* Shared emission points (jumped to by the dispatch cases above):
   binary op, unary op, short-circuit boolean, increment.  */
7704 bc_expand_binary_operation (binoptab
, TREE_TYPE (exp
),
7705 TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1));
7711 bc_expand_unary_operation (unoptab
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
/* Short-circuit ANDIF/ORIF: evaluate operand 0, duplicate it, branch
   with OPCODE past operand 1 if it already decides the result;
   otherwise drop it and evaluate operand 1.  */
7717 bc_expand_expr (TREE_OPERAND (exp
, 0));
7718 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7719 lab
= bc_get_bytecode_label ();
7721 bc_emit_instruction (duplicate
);
7722 bc_emit_bytecode (opcode
);
7723 bc_emit_bytecode_labelref (lab
);
7725 #ifdef DEBUG_PRINT_CODE
7726 fputc ('\n', stderr
);
7729 bc_emit_instruction (drop
);
7731 bc_expand_expr (TREE_OPERAND (exp
, 1));
7732 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7733 bc_emit_bytecode_labeldef (lab
);
/* Increment/decrement: push the (converted) quantum, push the lvalue
   address, then emit the increment selected by INCROPTAB.  */
7739 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7741 /* Push the quantum. */
7742 bc_expand_expr (TREE_OPERAND (exp
, 1));
7744 /* Convert it to the lvalue's type. */
7745 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)), type
);
7747 /* Push the address of the lvalue */
7748 bc_expand_expr (build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
), TREE_OPERAND (exp
, 0)));
7750 /* Perform actual increment */
7751 bc_expand_increment (incroptab
, type
);
/* NOTE(review): line-mangled extraction; the function's return type and
   parameter declaration lines are missing from this view, as are some
   body lines (gaps in the embedded line numbers).  Text preserved
   verbatim; only comments added.  */
7755 /* Return the alignment in bits of EXP, a pointer valued expression.
7756 But don't return more than MAX_ALIGN no matter what.
7757 The alignment returned is, by default, the alignment of the thing that
7758 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7760 Otherwise, look at the expression to see if we can do better, i.e., if the
7761 expression is actually pointing at an object whose alignment is tighter. */
7764 get_pointer_alignment (exp
, max_align
)
7768 unsigned align
, inner
;
/* Non-pointer expressions carry no alignment information.  */
7770 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
/* Start from the pointed-to type's declared alignment, clamped.  */
7773 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7774 align
= MIN (align
, max_align
);
/* Walk down the expression looking for a tighter bound (the loop
   construct enclosing this switch is not visible here).  */
7778 switch (TREE_CODE (exp
))
7782 case NON_LVALUE_EXPR
:
/* Conversions: peel the operand; a conversion may tighten (never
   loosen) what we can deduce, so fold in the inner type's alignment.  */
7783 exp
= TREE_OPERAND (exp
, 0);
7784 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
7786 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7787 align
= MIN (inner
, max_align
);
7791 /* If sum of pointer + int, restrict our maximum alignment to that
7792 imposed by the integer. If not, we can't do any better than
7794 if (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
)
/* Halve MAX_ALIGN until it divides the constant byte offset (scaled
   to bits); the offset's low-order bits bound the achievable
   alignment.  (Loop body lines are missing from this view.)  */
7797 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
)
7802 exp
= TREE_OPERAND (exp
, 0);
/* ADDR_EXPR: the pointed-to object is known exactly.  */
7806 /* See what we are pointing at and look at its alignment. */
7807 exp
= TREE_OPERAND (exp
, 0);
7808 if (TREE_CODE (exp
) == FUNCTION_DECL
)
7809 align
= FUNCTION_BOUNDARY
;
7810 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
7811 align
= DECL_ALIGN (exp
);
7812 #ifdef CONSTANT_ALIGNMENT
7813 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c')
7814 align
= CONSTANT_ALIGNMENT (exp
, align
);
7816 return MIN (align
, max_align
);
/* NOTE(review): line-mangled extraction; the parameter declarations and
   some body lines (e.g. the *PTR_OFFSET stores in the PLUS_EXPR arms)
   are missing from this view.  Text preserved verbatim; only comments
   added.  */
7824 /* Return the tree node and offset if a given argument corresponds to
7825 a string constant. */
7828 string_constant (arg
, ptr_offset
)
/* Direct case: &"literal" -- the string itself, offset zero.  */
7834 if (TREE_CODE (arg
) == ADDR_EXPR
7835 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
7837 *ptr_offset
= integer_zero_node
;
7838 return TREE_OPERAND (arg
, 0);
/* Pointer-arithmetic case: &"literal" + offset, in either operand
   order.  (The code that stores the offset through PTR_OFFSET in
   these arms is not visible in this extraction.)  */
7840 else if (TREE_CODE (arg
) == PLUS_EXPR
)
7842 tree arg0
= TREE_OPERAND (arg
, 0);
7843 tree arg1
= TREE_OPERAND (arg
, 1);
7848 if (TREE_CODE (arg0
) == ADDR_EXPR
7849 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
7852 return TREE_OPERAND (arg0
, 0);
7854 else if (TREE_CODE (arg1
) == ADDR_EXPR
7855 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
7858 return TREE_OPERAND (arg1
, 0);
/* NOTE(review): this appears to be the body of c_strlen -- the function
   declaration head (original lines 7871-7879) is missing from this
   line-mangled extraction, along with several body lines.  Text
   preserved verbatim; only comments added.  Assumes SRC is the
   argument tree and the function returns a size tree or 0 -- TODO
   confirm against the full source.  */
7865 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7866 way, because it could contain a zero byte in the middle.
7867 TREE_STRING_LENGTH is the size of the character array, not the string.
7869 Unfortunately, string_constant can't access the values of const char
7870 arrays with initializers, so neither can we do so here. */
/* Resolve SRC to a STRING_CST plus byte offset; bail out (not visible
   here) if it is not a string constant.  */
7880 src
= string_constant (src
, &offset_node
);
7883 max
= TREE_STRING_LENGTH (src
);
7884 ptr
= TREE_STRING_POINTER (src
);
/* Variable (non-constant) offset: only usable if the string has no
   embedded NULs before the terminator.  */
7885 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
7887 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7888 compute the offset to the following null if we don't know where to
7889 start searching for it. */
7891 for (i
= 0; i
< max
; i
++)
7894 /* We don't know the starting offset, but we do know that the string
7895 has no internal zero bytes. We can assume that the offset falls
7896 within the bounds of the string; otherwise, the programmer deserves
7897 what he gets. Subtract the offset from the length of the string,
7899 /* This would perhaps not be valid if we were dealing with named
7900 arrays in addition to literal string constants. */
7901 return size_binop (MINUS_EXPR
, size_int (max
), offset_node
);
7904 /* We have a known offset into the string. Start searching there for
7905 a null character. */
7906 if (offset_node
== 0)
7910 /* Did we get a long long offset? If so, punt. */
7911 if (TREE_INT_CST_HIGH (offset_node
) != 0)
7913 offset
= TREE_INT_CST_LOW (offset_node
);
7915 /* If the offset is known to be out of bounds, warn, and call strlen at
7917 if (offset
< 0 || offset
> max
)
7919 warning ("offset outside bounds of constant string");
7922 /* Use strlen to search for the first zero byte. Since any strings
7923 constructed with build_string will have nulls appended, we win even
7924 if we get handed something like (char[4])"abcd".
7926 Since OFFSET is our starting index into the string, no further
7927 calculation is needed. */
7928 return size_int (strlen (ptr
+ offset
));
/* NOTE(review): line-mangled extraction of expand_builtin_return_addr
   (shared expander for __builtin_return_address and
   __builtin_frame_address): walk COUNT frames back from TEM and
   return either the frame address or the return address.  The return
   type, some parameter declarations, and several body lines are
   missing from this view.  Text preserved verbatim; only comments
   added.  */
7932 expand_builtin_return_addr (fndecl_code
, count
, tem
)
7933 enum built_in_function fndecl_code
;
7939 /* Some machines need special handling before we can access
7940 arbitrary frames. For example, on the sparc, we must first flush
7941 all register windows to the stack. */
7942 #ifdef SETUP_FRAME_ADDRESSES
7943 SETUP_FRAME_ADDRESSES ();
7946 /* On the sparc, the return address is not in the frame, it is in a
7947 register. There is no way to access it off of the current frame
7948 pointer, but it can be accessed off the previous frame pointer by
7949 reading the value from the register window save area. */
7950 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
/* On such targets, fetching a return address requires looking one
   frame further back than requested (adjustment line not visible
   here).  */
7951 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
7955 /* Scan back COUNT frames to the specified frame. */
7956 for (i
= 0; i
< count
; i
++)
7958 /* Assume the dynamic chain pointer is in the word that the
7959 frame address points to, unless otherwise specified. */
7960 #ifdef DYNAMIC_CHAIN_ADDRESS
7961 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
/* Follow the dynamic chain: load the previous frame pointer from
   memory.  */
7963 tem
= memory_address (Pmode
, tem
);
7964 tem
= copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
7967 /* For __builtin_frame_address, return what we've got. */
7968 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
7971 /* For __builtin_return_address, Get the return address from that
7973 #ifdef RETURN_ADDR_RTX
7974 tem
= RETURN_ADDR_RTX (count
, tem
);
/* Default layout: the return address sits one word past the frame
   pointer.  */
7976 tem
= memory_address (Pmode
,
7977 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
7978 tem
= gen_rtx (MEM
, Pmode
, tem
);
7983 /* Expand an expression EXP that calls a built-in function,
7984 with result going to TARGET if that's convenient
7985 (and in mode MODE if that's convenient).
7986 SUBTARGET may be used as the target for computing one of EXP's operands.
7987 IGNORE is nonzero if the value is to be ignored. */
7989 #define CALLED_AS_BUILT_IN(NODE) \
7990 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7993 expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
7997 enum machine_mode mode
;
8000 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
8001 tree arglist
= TREE_OPERAND (exp
, 1);
8004 enum machine_mode value_mode
= TYPE_MODE (TREE_TYPE (exp
));
8005 optab builtin_optab
;
8007 switch (DECL_FUNCTION_CODE (fndecl
))
8012 /* build_function_call changes these into ABS_EXPR. */
8017 /* Treat these like sqrt, but only if the user asks for them. */
8018 if (! flag_fast_math
)
8020 case BUILT_IN_FSQRT
:
8021 /* If not optimizing, call the library function. */
8026 /* Arg could be wrong type if user redeclared this fcn wrong. */
8027 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
)
8030 /* Stabilize and compute the argument. */
8031 if (TREE_CODE (TREE_VALUE (arglist
)) != VAR_DECL
8032 && TREE_CODE (TREE_VALUE (arglist
)) != PARM_DECL
)
8034 exp
= copy_node (exp
);
8035 arglist
= copy_node (arglist
);
8036 TREE_OPERAND (exp
, 1) = arglist
;
8037 TREE_VALUE (arglist
) = save_expr (TREE_VALUE (arglist
));
8039 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
8041 /* Make a suitable register to place result in. */
8042 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8047 switch (DECL_FUNCTION_CODE (fndecl
))
8050 builtin_optab
= sin_optab
; break;
8052 builtin_optab
= cos_optab
; break;
8053 case BUILT_IN_FSQRT
:
8054 builtin_optab
= sqrt_optab
; break;
8059 /* Compute into TARGET.
8060 Set TARGET to wherever the result comes back. */
8061 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8062 builtin_optab
, op0
, target
, 0);
8064 /* If we were unable to expand via the builtin, stop the
8065 sequence (without outputting the insns) and break, causing
8066 a call to the library function. */
8073 /* Check the results by default. But if flag_fast_math is turned on,
8074 then assume sqrt will always be called with valid arguments. */
8076 if (! flag_fast_math
)
8078 /* Don't define the builtin FP instructions
8079 if your machine is not IEEE. */
8080 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
8083 lab1
= gen_label_rtx ();
8085 /* Test the result; if it is NaN, set errno=EDOM because
8086 the argument was not in the domain. */
8087 emit_cmp_insn (target
, target
, EQ
, 0, GET_MODE (target
), 0, 0);
8088 emit_jump_insn (gen_beq (lab1
));
8092 #ifdef GEN_ERRNO_RTX
8093 rtx errno_rtx
= GEN_ERRNO_RTX
;
8096 = gen_rtx (MEM
, word_mode
, gen_rtx (SYMBOL_REF
, Pmode
, "errno"));
8099 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
8102 /* We can't set errno=EDOM directly; let the library call do it.
8103 Pop the arguments right away in case the call gets deleted. */
8105 expand_call (exp
, target
, 0);
8112 /* Output the entire sequence. */
8113 insns
= get_insns ();
8119 /* __builtin_apply_args returns block of memory allocated on
8120 the stack into which is stored the arg pointer, structure
8121 value address, static chain, and all the registers that might
8122 possibly be used in performing a function call. The code is
8123 moved to the start of the function so the incoming values are
8125 case BUILT_IN_APPLY_ARGS
:
8126 /* Don't do __builtin_apply_args more than once in a function.
8127 Save the result of the first call and reuse it. */
8128 if (apply_args_value
!= 0)
8129 return apply_args_value
;
8131 /* When this function is called, it means that registers must be
8132 saved on entry to this function. So we migrate the
8133 call to the first insn of this function. */
8138 temp
= expand_builtin_apply_args ();
8142 apply_args_value
= temp
;
8144 /* Put the sequence after the NOTE that starts the function.
8145 If this is inside a SEQUENCE, make the outer-level insn
8146 chain current, so the code is placed at the start of the
8148 push_topmost_sequence ();
8149 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8150 pop_topmost_sequence ();
8154 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8155 FUNCTION with a copy of the parameters described by
8156 ARGUMENTS, and ARGSIZE. It returns a block of memory
8157 allocated on the stack into which is stored all the registers
8158 that might possibly be used for returning the result of a
8159 function. ARGUMENTS is the value returned by
8160 __builtin_apply_args. ARGSIZE is the number of bytes of
8161 arguments that must be copied. ??? How should this value be
8162 computed? We'll also need a safe worst case value for varargs
8164 case BUILT_IN_APPLY
:
8166 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8167 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8168 || TREE_CHAIN (arglist
) == 0
8169 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8170 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8171 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8179 for (t
= arglist
, i
= 0; t
; t
= TREE_CHAIN (t
), i
++)
8180 ops
[i
] = expand_expr (TREE_VALUE (t
), NULL_RTX
, VOIDmode
, 0);
8182 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
8185 /* __builtin_return (RESULT) causes the function to return the
8186 value described by RESULT. RESULT is address of the block of
8187 memory returned by __builtin_apply. */
8188 case BUILT_IN_RETURN
:
8190 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8191 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
)
8192 expand_builtin_return (expand_expr (TREE_VALUE (arglist
),
8193 NULL_RTX
, VOIDmode
, 0));
8196 case BUILT_IN_SAVEREGS
:
8197 /* Don't do __builtin_saveregs more than once in a function.
8198 Save the result of the first call and reuse it. */
8199 if (saveregs_value
!= 0)
8200 return saveregs_value
;
8202 /* When this function is called, it means that registers must be
8203 saved on entry to this function. So we migrate the
8204 call to the first insn of this function. */
8208 /* Now really call the function. `expand_call' does not call
8209 expand_builtin, so there is no danger of infinite recursion here. */
8212 #ifdef EXPAND_BUILTIN_SAVEREGS
8213 /* Do whatever the machine needs done in this case. */
8214 temp
= EXPAND_BUILTIN_SAVEREGS (arglist
);
8216 /* The register where the function returns its value
8217 is likely to have something else in it, such as an argument.
8218 So preserve that register around the call. */
8220 if (value_mode
!= VOIDmode
)
8222 rtx valreg
= hard_libcall_value (value_mode
);
8223 rtx saved_valreg
= gen_reg_rtx (value_mode
);
8225 emit_move_insn (saved_valreg
, valreg
);
8226 temp
= expand_call (exp
, target
, ignore
);
8227 emit_move_insn (valreg
, saved_valreg
);
8230 /* Generate the call, putting the value in a pseudo. */
8231 temp
= expand_call (exp
, target
, ignore
);
8237 saveregs_value
= temp
;
8239 /* Put the sequence after the NOTE that starts the function.
8240 If this is inside a SEQUENCE, make the outer-level insn
8241 chain current, so the code is placed at the start of the
8243 push_topmost_sequence ();
8244 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8245 pop_topmost_sequence ();
8249 /* __builtin_args_info (N) returns word N of the arg space info
8250 for the current function. The number and meanings of words
8251 is controlled by the definition of CUMULATIVE_ARGS. */
8252 case BUILT_IN_ARGS_INFO
:
8254 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
8256 int *word_ptr
= (int *) ¤t_function_args_info
;
8257 tree type
, elts
, result
;
8259 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
8260 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8261 __FILE__
, __LINE__
);
8265 tree arg
= TREE_VALUE (arglist
);
8266 if (TREE_CODE (arg
) != INTEGER_CST
)
8267 error ("argument of `__builtin_args_info' must be constant");
8270 int wordnum
= TREE_INT_CST_LOW (arg
);
8272 if (wordnum
< 0 || wordnum
>= nwords
|| TREE_INT_CST_HIGH (arg
))
8273 error ("argument of `__builtin_args_info' out of range");
8275 return GEN_INT (word_ptr
[wordnum
]);
8279 error ("missing argument in `__builtin_args_info'");
8284 for (i
= 0; i
< nwords
; i
++)
8285 elts
= tree_cons (NULL_TREE
, build_int_2 (word_ptr
[i
], 0));
8287 type
= build_array_type (integer_type_node
,
8288 build_index_type (build_int_2 (nwords
, 0)));
8289 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, nreverse (elts
));
8290 TREE_CONSTANT (result
) = 1;
8291 TREE_STATIC (result
) = 1;
8292 result
= build (INDIRECT_REF
, build_pointer_type (type
), result
);
8293 TREE_CONSTANT (result
) = 1;
8294 return expand_expr (result
, NULL_RTX
, VOIDmode
, 0);
8298 /* Return the address of the first anonymous stack arg. */
8299 case BUILT_IN_NEXT_ARG
:
8301 tree fntype
= TREE_TYPE (current_function_decl
);
8303 if ((TYPE_ARG_TYPES (fntype
) == 0
8304 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
8306 && ! current_function_varargs
)
8308 error ("`va_start' used in function with fixed args");
8314 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
8315 tree arg
= TREE_VALUE (arglist
);
8317 /* Strip off all nops for the sake of the comparison. This
8318 is not quite the same as STRIP_NOPS. It does more.
8319 We must also strip off INDIRECT_EXPR for C++ reference
8321 while (TREE_CODE (arg
) == NOP_EXPR
8322 || TREE_CODE (arg
) == CONVERT_EXPR
8323 || TREE_CODE (arg
) == NON_LVALUE_EXPR
8324 || TREE_CODE (arg
) == INDIRECT_REF
)
8325 arg
= TREE_OPERAND (arg
, 0);
8326 if (arg
!= last_parm
)
8327 warning ("second parameter of `va_start' not last named argument");
8329 else if (! current_function_varargs
)
8330 /* Evidently an out of date version of <stdarg.h>; can't validate
8331 va_start's second argument, but can still work as intended. */
8332 warning ("`__builtin_next_arg' called without an argument");
8335 return expand_binop (Pmode
, add_optab
,
8336 current_function_internal_arg_pointer
,
8337 current_function_arg_offset_rtx
,
8338 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
8340 case BUILT_IN_CLASSIFY_TYPE
:
8343 tree type
= TREE_TYPE (TREE_VALUE (arglist
));
8344 enum tree_code code
= TREE_CODE (type
);
8345 if (code
== VOID_TYPE
)
8346 return GEN_INT (void_type_class
);
8347 if (code
== INTEGER_TYPE
)
8348 return GEN_INT (integer_type_class
);
8349 if (code
== CHAR_TYPE
)
8350 return GEN_INT (char_type_class
);
8351 if (code
== ENUMERAL_TYPE
)
8352 return GEN_INT (enumeral_type_class
);
8353 if (code
== BOOLEAN_TYPE
)
8354 return GEN_INT (boolean_type_class
);
8355 if (code
== POINTER_TYPE
)
8356 return GEN_INT (pointer_type_class
);
8357 if (code
== REFERENCE_TYPE
)
8358 return GEN_INT (reference_type_class
);
8359 if (code
== OFFSET_TYPE
)
8360 return GEN_INT (offset_type_class
);
8361 if (code
== REAL_TYPE
)
8362 return GEN_INT (real_type_class
);
8363 if (code
== COMPLEX_TYPE
)
8364 return GEN_INT (complex_type_class
);
8365 if (code
== FUNCTION_TYPE
)
8366 return GEN_INT (function_type_class
);
8367 if (code
== METHOD_TYPE
)
8368 return GEN_INT (method_type_class
);
8369 if (code
== RECORD_TYPE
)
8370 return GEN_INT (record_type_class
);
8371 if (code
== UNION_TYPE
|| code
== QUAL_UNION_TYPE
)
8372 return GEN_INT (union_type_class
);
8373 if (code
== ARRAY_TYPE
)
8375 if (TYPE_STRING_FLAG (type
))
8376 return GEN_INT (string_type_class
);
8378 return GEN_INT (array_type_class
);
8380 if (code
== SET_TYPE
)
8381 return GEN_INT (set_type_class
);
8382 if (code
== FILE_TYPE
)
8383 return GEN_INT (file_type_class
);
8384 if (code
== LANG_TYPE
)
8385 return GEN_INT (lang_type_class
);
8387 return GEN_INT (no_type_class
);
8389 case BUILT_IN_CONSTANT_P
:
8394 tree arg
= TREE_VALUE (arglist
);
8397 return (TREE_CODE_CLASS (TREE_CODE (arg
)) == 'c'
8398 || (TREE_CODE (arg
) == ADDR_EXPR
8399 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8400 ? const1_rtx
: const0_rtx
);
8403 case BUILT_IN_FRAME_ADDRESS
:
8404 /* The argument must be a nonnegative integer constant.
8405 It counts the number of frames to scan up the stack.
8406 The value is the address of that frame. */
8407 case BUILT_IN_RETURN_ADDRESS
:
8408 /* The argument must be a nonnegative integer constant.
8409 It counts the number of frames to scan up the stack.
8410 The value is the return address saved in that frame. */
8412 /* Warning about missing arg was already issued. */
8414 else if (TREE_CODE (TREE_VALUE (arglist
)) != INTEGER_CST
)
8416 error ("invalid arg to `__builtin_return_address'");
8419 else if (tree_int_cst_sgn (TREE_VALUE (arglist
)) < 0)
8421 error ("invalid arg to `__builtin_return_address'");
8426 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
8427 TREE_INT_CST_LOW (TREE_VALUE (arglist
)),
8428 hard_frame_pointer_rtx
);
8430 /* For __builtin_frame_address, return what we've got. */
8431 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
8434 if (GET_CODE (tem
) != REG
)
8435 tem
= copy_to_reg (tem
);
8439 case BUILT_IN_ALLOCA
:
8441 /* Arg could be non-integer if user redeclared this fcn wrong. */
8442 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8445 /* Compute the argument. */
8446 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
8448 /* Allocate the desired space. */
8449 return allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
8452 /* If not optimizing, call the library function. */
8453 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8457 /* Arg could be non-integer if user redeclared this fcn wrong. */
8458 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8461 /* Compute the argument. */
8462 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
8463 /* Compute ffs, into TARGET if possible.
8464 Set TARGET to wherever the result comes back. */
8465 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8466 ffs_optab
, op0
, target
, 1);
8471 case BUILT_IN_STRLEN
:
8472 /* If not optimizing, call the library function. */
8473 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8477 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8478 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8482 tree src
= TREE_VALUE (arglist
);
8483 tree len
= c_strlen (src
);
8486 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8488 rtx result
, src_rtx
, char_rtx
;
8489 enum machine_mode insn_mode
= value_mode
, char_mode
;
8490 enum insn_code icode
;
8492 /* If the length is known, just return it. */
8494 return expand_expr (len
, target
, mode
, 0);
8496 /* If SRC is not a pointer type, don't do this operation inline. */
8500 /* Call a function if we can't compute strlen in the right mode. */
8502 while (insn_mode
!= VOIDmode
)
8504 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
8505 if (icode
!= CODE_FOR_nothing
)
8508 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
8510 if (insn_mode
== VOIDmode
)
8513 /* Make a place to write the result of the instruction. */
8516 && GET_CODE (result
) == REG
8517 && GET_MODE (result
) == insn_mode
8518 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8519 result
= gen_reg_rtx (insn_mode
);
8521 /* Make sure the operands are acceptable to the predicates. */
8523 if (! (*insn_operand_predicate
[(int)icode
][0]) (result
, insn_mode
))
8524 result
= gen_reg_rtx (insn_mode
);
8526 src_rtx
= memory_address (BLKmode
,
8527 expand_expr (src
, NULL_RTX
, ptr_mode
,
8529 if (! (*insn_operand_predicate
[(int)icode
][1]) (src_rtx
, Pmode
))
8530 src_rtx
= copy_to_mode_reg (Pmode
, src_rtx
);
8532 char_rtx
= const0_rtx
;
8533 char_mode
= insn_operand_mode
[(int)icode
][2];
8534 if (! (*insn_operand_predicate
[(int)icode
][2]) (char_rtx
, char_mode
))
8535 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
8537 emit_insn (GEN_FCN (icode
) (result
,
8538 gen_rtx (MEM
, BLKmode
, src_rtx
),
8539 char_rtx
, GEN_INT (align
)));
8541 /* Return the value in the proper mode for this function. */
8542 if (GET_MODE (result
) == value_mode
)
8544 else if (target
!= 0)
8546 convert_move (target
, result
, 0);
8550 return convert_to_mode (value_mode
, result
, 0);
8553 case BUILT_IN_STRCPY
:
8554 /* If not optimizing, call the library function. */
8555 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8559 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8560 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8561 || TREE_CHAIN (arglist
) == 0
8562 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8566 tree len
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
8571 len
= size_binop (PLUS_EXPR
, len
, integer_one_node
);
8573 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8577 case BUILT_IN_MEMCPY
:
8578 /* If not optimizing, call the library function. */
8579 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8583 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8584 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8585 || TREE_CHAIN (arglist
) == 0
8586 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8587 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8588 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8592 tree dest
= TREE_VALUE (arglist
);
8593 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
8594 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8598 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8600 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8601 rtx dest_rtx
, dest_mem
, src_mem
;
8603 /* If either SRC or DEST is not a pointer type, don't do
8604 this operation in-line. */
8605 if (src_align
== 0 || dest_align
== 0)
8607 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCPY
)
8608 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8612 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8613 dest_mem
= gen_rtx (MEM
, BLKmode
,
8614 memory_address (BLKmode
, dest_rtx
));
8615 /* There could be a void* cast on top of the object. */
8616 while (TREE_CODE (dest
) == NOP_EXPR
)
8617 dest
= TREE_OPERAND (dest
, 0);
8618 type
= TREE_TYPE (TREE_TYPE (dest
));
8619 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8620 src_mem
= gen_rtx (MEM
, BLKmode
,
8621 memory_address (BLKmode
,
8622 expand_expr (src
, NULL_RTX
,
8625 /* There could be a void* cast on top of the object. */
8626 while (TREE_CODE (src
) == NOP_EXPR
)
8627 src
= TREE_OPERAND (src
, 0);
8628 type
= TREE_TYPE (TREE_TYPE (src
));
8629 MEM_IN_STRUCT_P (src_mem
) = AGGREGATE_TYPE_P (type
);
8631 /* Copy word part most expediently. */
8632 emit_block_move (dest_mem
, src_mem
,
8633 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8634 MIN (src_align
, dest_align
));
8635 return force_operand (dest_rtx
, NULL_RTX
);
8638 case BUILT_IN_MEMSET
:
8639 /* If not optimizing, call the library function. */
8640 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8644 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8645 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8646 || TREE_CHAIN (arglist
) == 0
8647 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
))))
8649 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8651 != (TREE_CODE (TREE_TYPE
8653 (TREE_CHAIN (TREE_CHAIN (arglist
))))))))
8657 tree dest
= TREE_VALUE (arglist
);
8658 tree val
= TREE_VALUE (TREE_CHAIN (arglist
));
8659 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8663 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8664 rtx dest_rtx
, dest_mem
;
8666 /* If DEST is not a pointer type, don't do this
8667 operation in-line. */
8668 if (dest_align
== 0)
8671 /* If VAL is not 0, don't do this operation in-line. */
8672 if (expand_expr (val
, NULL_RTX
, VOIDmode
, 0) != const0_rtx
)
8675 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8676 dest_mem
= gen_rtx (MEM
, BLKmode
,
8677 memory_address (BLKmode
, dest_rtx
));
8678 /* There could be a void* cast on top of the object. */
8679 while (TREE_CODE (dest
) == NOP_EXPR
)
8680 dest
= TREE_OPERAND (dest
, 0);
8681 type
= TREE_TYPE (TREE_TYPE (dest
));
8682 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8684 clear_storage (dest_mem
, expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8687 return force_operand (dest_rtx
, NULL_RTX
);
8690 /* These comparison functions need an instruction that returns an actual
8691 index. An ordinary compare that just sets the condition codes
8693 #ifdef HAVE_cmpstrsi
8694 case BUILT_IN_STRCMP
:
8695 /* If not optimizing, call the library function. */
8696 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8700 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8701 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8702 || TREE_CHAIN (arglist
) == 0
8703 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8705 else if (!HAVE_cmpstrsi
)
8708 tree arg1
= TREE_VALUE (arglist
);
8709 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8713 len
= c_strlen (arg1
);
8715 len
= size_binop (PLUS_EXPR
, integer_one_node
, len
);
8716 len2
= c_strlen (arg2
);
8718 len2
= size_binop (PLUS_EXPR
, integer_one_node
, len2
);
8720 /* If we don't have a constant length for the first, use the length
8721 of the second, if we know it. We don't require a constant for
8722 this case; some cost analysis could be done if both are available
8723 but neither is constant. For now, assume they're equally cheap.
8725 If both strings have constant lengths, use the smaller. This
8726 could arise if optimization results in strcpy being called with
8727 two fixed strings, or if the code was machine-generated. We should
8728 add some code to the `memcmp' handler below to deal with such
8729 situations, someday. */
8730 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
8737 else if (len2
&& TREE_CODE (len2
) == INTEGER_CST
)
8739 if (tree_int_cst_lt (len2
, len
))
8743 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8747 case BUILT_IN_MEMCMP
:
8748 /* If not optimizing, call the library function. */
8749 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8753 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8754 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8755 || TREE_CHAIN (arglist
) == 0
8756 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8757 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8758 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8760 else if (!HAVE_cmpstrsi
)
8763 tree arg1
= TREE_VALUE (arglist
);
8764 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8765 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8769 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8771 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8772 enum machine_mode insn_mode
8773 = insn_operand_mode
[(int) CODE_FOR_cmpstrsi
][0];
8775 /* If we don't have POINTER_TYPE, call the function. */
8776 if (arg1_align
== 0 || arg2_align
== 0)
8778 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCMP
)
8779 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8783 /* Make a place to write the result of the instruction. */
8786 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
8787 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8788 result
= gen_reg_rtx (insn_mode
);
8790 emit_insn (gen_cmpstrsi (result
,
8791 gen_rtx (MEM
, BLKmode
,
8792 expand_expr (arg1
, NULL_RTX
,
8795 gen_rtx (MEM
, BLKmode
,
8796 expand_expr (arg2
, NULL_RTX
,
8799 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8800 GEN_INT (MIN (arg1_align
, arg2_align
))));
8802 /* Return the value in the proper mode for this function. */
8803 mode
= TYPE_MODE (TREE_TYPE (exp
));
8804 if (GET_MODE (result
) == mode
)
8806 else if (target
!= 0)
8808 convert_move (target
, result
, 0);
8812 return convert_to_mode (mode
, result
, 0);
8815 case BUILT_IN_STRCMP
:
8816 case BUILT_IN_MEMCMP
:
8820 /* __builtin_setjmp is passed a pointer to an array of five words
8821 (not all will be used on all machines). It operates similarly to
8822 the C library function of the same name, but is more efficient.
8823 Much of the code below (and for longjmp) is copied from the handling
8826 NOTE: This is intended for use by GNAT and will only work in
8827 the method used by it. This code will likely NOT survive to
8828 the GCC 2.8.0 release. */
8829 case BUILT_IN_SETJMP
:
8831 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8835 rtx buf_addr
= expand_expr (TREE_VALUE (arglist
), subtarget
,
8837 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
8838 enum machine_mode sa_mode
= Pmode
;
8840 int old_inhibit_defer_pop
= inhibit_defer_pop
;
8841 int return_pops
= RETURN_POPS_ARGS (get_identifier ("__dummy"),
8842 get_identifier ("__dummy"), 0);
8844 CUMULATIVE_ARGS args_so_far
;
8847 #ifdef POINTERS_EXTEND_UNSIGNED
8848 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
8851 buf_addr
= force_reg (Pmode
, buf_addr
);
8853 if (target
== 0 || GET_CODE (target
) != REG
8854 || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
8855 target
= gen_reg_rtx (value_mode
);
8859 CONST_CALL_P (emit_note (NULL_PTR
, NOTE_INSN_SETJMP
)) = 1;
8860 current_function_calls_setjmp
= 1;
8862 /* We store the frame pointer and the address of lab1 in the buffer
8863 and use the rest of it for the stack save area, which is
8864 machine-dependent. */
8865 emit_move_insn (gen_rtx (MEM
, Pmode
, buf_addr
),
8866 virtual_stack_vars_rtx
);
8868 (validize_mem (gen_rtx (MEM
, Pmode
,
8869 plus_constant (buf_addr
,
8870 GET_MODE_SIZE (Pmode
)))),
8871 gen_rtx (LABEL_REF
, Pmode
, lab1
));
8873 #ifdef HAVE_save_stack_nonlocal
8874 if (HAVE_save_stack_nonlocal
)
8875 sa_mode
= insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0];
8878 stack_save
= gen_rtx (MEM
, sa_mode
,
8879 plus_constant (buf_addr
,
8880 2 * GET_MODE_SIZE (Pmode
)));
8881 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
8885 emit_insn (gen_setjmp ());
8888 /* Set TARGET to zero and branch around the other case. */
8889 emit_move_insn (target
, const0_rtx
);
8890 emit_jump_insn (gen_jump (lab2
));
8894 /* Note that setjmp clobbers FP when we get here, so we have to
8895 make sure it's marked as used by this function. */
8896 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
8898 /* Mark the static chain as clobbered here so life information
8899 doesn't get messed up for it. */
8900 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, static_chain_rtx
));
8902 /* Now put in the code to restore the frame pointer, and argument
8903 pointer, if needed. The code below is from expand_end_bindings
8904 in stmt.c; see detailed documentation there. */
8905 #ifdef HAVE_nonlocal_goto
8906 if (! HAVE_nonlocal_goto
)
8908 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
8910 current_function_has_nonlocal_goto
= 1;
8912 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8913 if (fixed_regs
[ARG_POINTER_REGNUM
])
8915 #ifdef ELIMINABLE_REGS
8916 static struct elims
{int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
8918 for (i
= 0; i
< sizeof elim_regs
/ sizeof elim_regs
[0]; i
++)
8919 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
8920 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
8923 if (i
== sizeof elim_regs
/ sizeof elim_regs
[0])
8926 /* Now restore our arg pointer from the address at which it
8927 was saved in our stack frame.
8928 If there hasn't been space allocated for it yet, make
8930 if (arg_pointer_save_area
== 0)
8931 arg_pointer_save_area
8932 = assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
8933 emit_move_insn (virtual_incoming_args_rtx
,
8934 copy_to_reg (arg_pointer_save_area
));
8939 #ifdef HAVE_nonlocal_goto_receiver
8940 if (HAVE_nonlocal_goto_receiver
)
8941 emit_insn (gen_nonlocal_goto_receiver ());
8943 /* The static chain pointer contains the address of dummy function.
8944 We need to call it here to handle some PIC cases of restoring
8945 a global pointer. Then return 1. */
8946 op0
= copy_to_mode_reg (Pmode
, static_chain_rtx
);
8948 /* We can't actually call emit_library_call here, so do everything
8949 it does, which isn't much for a libfunc with no args. */
8950 op0
= memory_address (FUNCTION_MODE
, op0
);
8952 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
,
8953 gen_rtx (SYMBOL_REF
, Pmode
, "__dummy"), 1);
8954 next_arg_reg
= FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1);
8956 #ifndef ACCUMULATE_OUTGOING_ARGS
8957 #ifdef HAVE_call_pop
8959 emit_call_insn (gen_call_pop (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8960 const0_rtx
, next_arg_reg
,
8961 GEN_INT (return_pops
)));
8968 emit_call_insn (gen_call (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8969 const0_rtx
, next_arg_reg
, const0_rtx
));
8974 emit_move_insn (target
, const1_rtx
);
8979 /* __builtin_longjmp is passed a pointer to an array of five words
8980 and a value, which is a dummy. It's similar to the C library longjmp
8981 function but works with __builtin_setjmp above. */
8982 case BUILT_IN_LONGJMP
:
8983 if (arglist
== 0 || TREE_CHAIN (arglist
) == 0
8984 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8988 tree dummy_id
= get_identifier ("__dummy");
8989 tree dummy_type
= build_function_type (void_type_node
, NULL_TREE
);
8990 tree dummy_decl
= build_decl (FUNCTION_DECL
, dummy_id
, dummy_type
);
8991 #ifdef POINTERS_EXTEND_UNSIGNED
8994 convert_memory_address
8996 expand_expr (TREE_VALUE (arglist
),
8997 NULL_RTX
, VOIDmode
, 0)));
9000 = force_reg (Pmode
, expand_expr (TREE_VALUE (arglist
),
9004 rtx fp
= gen_rtx (MEM
, Pmode
, buf_addr
);
9005 rtx lab
= gen_rtx (MEM
, Pmode
,
9006 plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
)));
9007 enum machine_mode sa_mode
9008 #ifdef HAVE_save_stack_nonlocal
9009 = (HAVE_save_stack_nonlocal
9010 ? insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0]
9015 rtx stack
= gen_rtx (MEM
, sa_mode
,
9016 plus_constant (buf_addr
,
9017 2 * GET_MODE_SIZE (Pmode
)));
9019 DECL_EXTERNAL (dummy_decl
) = 1;
9020 TREE_PUBLIC (dummy_decl
) = 1;
9021 make_decl_rtl (dummy_decl
, NULL_PTR
, 1);
9023 /* Expand the second expression just for side-effects. */
9024 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)),
9025 const0_rtx
, VOIDmode
, 0);
9027 assemble_external (dummy_decl
);
9029 /* Pick up FP, label, and SP from the block and jump. This code is
9030 from expand_goto in stmt.c; see there for detailed comments. */
9031 #if HAVE_nonlocal_goto
9032 if (HAVE_nonlocal_goto
)
9033 emit_insn (gen_nonlocal_goto (fp
, lab
, stack
,
9034 XEXP (DECL_RTL (dummy_decl
), 0)));
9038 lab
= copy_to_reg (lab
);
9039 emit_move_insn (hard_frame_pointer_rtx
, fp
);
9040 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
9042 /* Put in the static chain register the address of the dummy
9044 emit_move_insn (static_chain_rtx
, XEXP (DECL_RTL (dummy_decl
), 0));
9045 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
9046 emit_insn (gen_rtx (USE
, VOIDmode
, stack_pointer_rtx
));
9047 emit_insn (gen_rtx (USE
, VOIDmode
, static_chain_rtx
));
9048 emit_indirect_jump (lab
);
9054 default: /* just do library call, if unknown builtin */
9055 error ("built-in function `%s' not currently supported",
9056 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9059 /* The switch statement above can drop through to cause the function
9060 to be called normally. */
9062 return expand_call (exp
, target
, ignore
);
9065 /* Built-in functions to perform an untyped call and return. */
9067 /* For each register that may be used for calling a function, this
9068 gives a mode used to copy the register's value. VOIDmode indicates
9069 the register is not used for calling a function. If the machine
9070 has register windows, this gives only the outbound registers.
9071 INCOMING_REGNO gives the corresponding inbound register. */
9072 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
9074 /* For each register that may be used for returning values, this gives
9075 a mode used to copy the register's value. VOIDmode indicates the
9076 register is not used for returning values. If the machine has
9077 register windows, this gives only the outbound registers.
9078 INCOMING_REGNO gives the corresponding inbound register. */
9079 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
9081 /* For each register that may be used for calling a function, this
9082 gives the offset of that register into the block returned by
9083 __builtin_apply_args. 0 indicates that the register is not
9084 used for calling a function. */
9085 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
/* Map REGNO through OUTGOING_REGNO (when defined) and look up its offset
   in the __builtin_apply_args block.
   NOTE(review): this extraction is missing lines — the return type
   (presumably `int`), the parameter declaration, the braces, and the
   closing `#endif` for OUTGOING_REGNO (original lines 9090-9096, 9101,
   9103-9104) are absent.  Confirm against the full source.  */
9087 /* Return the offset of register REGNO into the block returned by
9088 __builtin_apply_args. This is not declared static, since it is
9089 needed in objc-act.c. */
9092 apply_args_register_offset (regno
)
9097 /* Arguments are always put in outgoing registers (in the argument
9098 block) if such make sense. */
9099 #ifdef OUTGOING_REGNO
9100 regno
= OUTGOING_REGNO(regno
);
9102 return apply_args_reg_offset
[regno
];
/* Compute (once, cached in a static) the size of the register-save block
   used by __builtin_apply_args, filling apply_args_mode[] and
   apply_args_reg_offset[] as a side effect.
   NOTE(review): the function header (presumably `static int
   apply_args_size ()`), opening brace, some declarations, and several
   interior lines (e.g. 9134, 9138-9139, 9147-9152, 9159-9161) are
   missing from this extraction — original line numbers jump.  The
   visible code is preserved byte-for-byte.  */
9105 /* Return the size required for the block returned by __builtin_apply_args,
9106 and initialize apply_args_mode. */
9111 static int size
= -1;
9113 enum machine_mode mode
;
9115 /* The values computed by this function never change. */
9118 /* The first value is the incoming arg-pointer. */
9119 size
= GET_MODE_SIZE (Pmode
);
9121 /* The second value is the structure value address unless this is
9122 passed as an "invisible" first argument. */
9123 if (struct_value_rtx
)
9124 size
+= GET_MODE_SIZE (Pmode
);
/* For each hard register that can carry an argument, pick a copy mode.  */
9126 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9127 if (FUNCTION_ARG_REGNO_P (regno
))
9129 /* Search for the proper mode for copying this register's
9130 value. I'm not sure this is right, but it works so far. */
9131 enum machine_mode best_mode
= VOIDmode
;
9133 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9135 mode
= GET_MODE_WIDER_MODE (mode
))
9136 if (HARD_REGNO_MODE_OK (regno
, mode
)
9137 && HARD_REGNO_NREGS (regno
, mode
) == 1)
/* Fall back to float modes if no single-register integer mode fits.  */
9140 if (best_mode
== VOIDmode
)
9141 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9143 mode
= GET_MODE_WIDER_MODE (mode
))
9144 if (HARD_REGNO_MODE_OK (regno
, mode
)
9145 && (mov_optab
->handlers
[(int) mode
].insn_code
9146 != CODE_FOR_nothing
))
9150 if (mode
== VOIDmode
)
/* Round the running size up to this mode's alignment, record the
   register's offset and mode, then advance past it.  */
9153 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9154 if (size
% align
!= 0)
9155 size
= CEIL (size
, align
) * align
;
9156 apply_args_reg_offset
[regno
] = size
;
9157 size
+= GET_MODE_SIZE (mode
);
9158 apply_args_mode
[regno
] = mode
;
9162 apply_args_mode
[regno
] = VOIDmode
;
9163 apply_args_reg_offset
[regno
] = 0;
/* Compute (once, cached in a static) the size of the block used by
   __builtin_apply to hold return-value registers, filling
   apply_result_mode[] as a side effect.  Parallels apply_args_size
   above but keyed on FUNCTION_VALUE_REGNO_P.
   NOTE(review): the return type line, braces, and several interior
   lines (e.g. 9192, 9195-9196, 9204-9206, 9208-9209) are missing
   from this extraction.  */
9169 /* Return the size required for the block returned by __builtin_apply,
9170 and initialize apply_result_mode. */
9173 apply_result_size ()
9175 static int size
= -1;
9177 enum machine_mode mode
;
9179 /* The values computed by this function never change. */
9184 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9185 if (FUNCTION_VALUE_REGNO_P (regno
))
9187 /* Search for the proper mode for copying this register's
9188 value. I'm not sure this is right, but it works so far. */
9189 enum machine_mode best_mode
= VOIDmode
;
9191 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9193 mode
= GET_MODE_WIDER_MODE (mode
))
9194 if (HARD_REGNO_MODE_OK (regno
, mode
))
9197 if (best_mode
== VOIDmode
)
9198 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9200 mode
= GET_MODE_WIDER_MODE (mode
))
9201 if (HARD_REGNO_MODE_OK (regno
, mode
)
9202 && (mov_optab
->handlers
[(int) mode
].insn_code
9203 != CODE_FOR_nothing
))
9207 if (mode
== VOIDmode
)
/* Align and advance past this return register, recording its mode.  */
9210 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9211 if (size
% align
!= 0)
9212 size
= CEIL (size
, align
) * align
;
9213 size
+= GET_MODE_SIZE (mode
);
9214 apply_result_mode
[regno
] = mode
;
9217 apply_result_mode
[regno
] = VOIDmode
;
9219 /* Allow targets that use untyped_call and untyped_return to override
9220 the size so that machine-specific information can be stored here. */
9221 #ifdef APPLY_RESULT_SIZE
9222 size
= APPLY_RESULT_SIZE
;
/* Build a PARALLEL of SETs that either save (SAVEP nonzero) every
   return register into the memory block RESULT, or restore them from it.
   Used by the untyped_call / untyped_return expanders below.
   NOTE(review): parameter declarations, braces, `nelts = 0` /
   `size = 0` initializers, and some declarations (original lines
   9235-9237, 9240, 9242-9243, 9246, 9257, 9259) are missing from
   this extraction.  */
9228 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9229 /* Create a vector describing the result block RESULT. If SAVEP is true,
9230 the result block is used to save the values; otherwise it is used to
9231 restore the values. */
9234 result_vector (savep
, result
)
9238 int regno
, size
, align
, nelts
;
9239 enum machine_mode mode
;
9241 rtx
*savevec
= (rtx
*) alloca (FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
9244 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9245 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
/* Align the running offset for this register's mode.  */
9247 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9248 if (size
% align
!= 0)
9249 size
= CEIL (size
, align
) * align
;
/* When saving we name the outgoing register; when restoring, the
   incoming one (register-window targets).  */
9250 reg
= gen_rtx (REG
, mode
, savep
? regno
: INCOMING_REGNO (regno
));
9251 mem
= change_address (result
, mode
,
9252 plus_constant (XEXP (result
, 0), size
));
9253 savevec
[nelts
++] = (savep
9254 ? gen_rtx (SET
, VOIDmode
, mem
, reg
)
9255 : gen_rtx (SET
, VOIDmode
, reg
, mem
));
9256 size
+= GET_MODE_SIZE (mode
);
9258 return gen_rtx (PARALLEL
, VOIDmode
, gen_rtvec_v (nelts
, savevec
));
9260 #endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Expander for __builtin_apply_args: allocate a stack block, save the
   incoming arg pointer, the structure-value address (if any), and every
   incoming argument register into it, and return the block's address.
   NOTE(review): the return type line, braces, and several interior
   lines (e.g. 9292-9293, 9296, 9302-9303, 9318) are missing from
   this extraction — in particular the offset operand of some
   plus_constant calls is absent.  */
9262 /* Save the state required to perform an untyped call with the same
9263 arguments as were passed to the current function. */
9266 expand_builtin_apply_args ()
9269 int size
, align
, regno
;
9270 enum machine_mode mode
;
9272 /* Create a block where the arg-pointer, structure value address,
9273 and argument registers can be saved. */
9274 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
9276 /* Walk past the arg-pointer and structure value address. */
9277 size
= GET_MODE_SIZE (Pmode
);
9278 if (struct_value_rtx
)
9279 size
+= GET_MODE_SIZE (Pmode
);
9281 /* Save each register used in calling a function to the block. */
9282 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9283 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
9287 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9288 if (size
% align
!= 0)
9289 size
= CEIL (size
, align
) * align
;
9291 tem
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9294 /* For reg-stack.c's stack register household.
9295 Compare with a similar piece of code in function.c. */
9297 emit_insn (gen_rtx (USE
, mode
, tem
));
9300 emit_move_insn (change_address (registers
, mode
,
9301 plus_constant (XEXP (registers
, 0),
9304 size
+= GET_MODE_SIZE (mode
);
9307 /* Save the arg pointer to the block. */
9308 emit_move_insn (change_address (registers
, Pmode
, XEXP (registers
, 0)),
9309 copy_to_reg (virtual_incoming_args_rtx
));
9310 size
= GET_MODE_SIZE (Pmode
);
9312 /* Save the structure value address unless this is passed as an
9313 "invisible" first argument. */
9314 if (struct_value_incoming_rtx
)
9316 emit_move_insn (change_address (registers
, Pmode
,
9317 plus_constant (XEXP (registers
, 0),
9319 copy_to_reg (struct_value_incoming_rtx
));
9320 size
+= GET_MODE_SIZE (Pmode
);
9323 /* Return the address of the block. */
9324 return copy_addr_to_reg (XEXP (registers
, 0));
/* Expander for __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE): push a
   copy of the saved argument block, reload the argument registers from
   it, emit the call (via untyped_call or call_value), save the return
   registers into a result block, and return that block's address.
   NOTE(review): the return type line, braces, and a number of interior
   lines are missing from this extraction (original line numbers jump at
   9352-9353, 9369, 9393-9394, 9408, 9429-9430, 9433-9435, 9442-9443,
   9446-9447, 9451, 9453-9459, 9464-9468, 9472-9474, 9477-9481);
   in particular some plus_constant offsets and the #else/#endif
   structure around the call strategies are absent.  */
9327 /* Perform an untyped call and save the state required to perform an
9328 untyped return of whatever value was returned by the given function. */
9331 expand_builtin_apply (function
, arguments
, argsize
)
9332 rtx function
, arguments
, argsize
;
9334 int size
, align
, regno
;
9335 enum machine_mode mode
;
9336 rtx incoming_args
, result
, reg
, dest
, call_insn
;
9337 rtx old_stack_level
= 0;
9338 rtx call_fusage
= 0;
9340 /* Create a block where the return registers can be saved. */
9341 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
9343 /* ??? The argsize value should be adjusted here. */
9345 /* Fetch the arg pointer from the ARGUMENTS block. */
9346 incoming_args
= gen_reg_rtx (Pmode
);
9347 emit_move_insn (incoming_args
,
9348 gen_rtx (MEM
, Pmode
, arguments
));
9349 #ifndef STACK_GROWS_DOWNWARD
9350 incoming_args
= expand_binop (Pmode
, sub_optab
, incoming_args
, argsize
,
9351 incoming_args
, 0, OPTAB_LIB_WIDEN
);
9354 /* Perform postincrements before actually calling the function. */
9357 /* Push a new argument block and copy the arguments. */
9358 do_pending_stack_adjust ();
9359 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
9361 /* Push a block of memory onto the stack to store the memory arguments.
9362 Save the address in a register, and copy the memory arguments. ??? I
9363 haven't figured out how the calling convention macros effect this,
9364 but it's likely that the source and/or destination addresses in
9365 the block copy will need updating in machine specific ways. */
9366 dest
= copy_addr_to_reg (push_block (argsize
, 0, 0));
9367 emit_block_move (gen_rtx (MEM
, BLKmode
, dest
),
9368 gen_rtx (MEM
, BLKmode
, incoming_args
),
9370 PARM_BOUNDARY
/ BITS_PER_UNIT
);
9372 /* Refer to the argument block. */
9374 arguments
= gen_rtx (MEM
, BLKmode
, arguments
);
9376 /* Walk past the arg-pointer and structure value address. */
9377 size
= GET_MODE_SIZE (Pmode
);
9378 if (struct_value_rtx
)
9379 size
+= GET_MODE_SIZE (Pmode
);
9381 /* Restore each of the registers previously saved. Make USE insns
9382 for each of these registers for use in making the call. */
9383 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9384 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
9386 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9387 if (size
% align
!= 0)
9388 size
= CEIL (size
, align
) * align
;
9389 reg
= gen_rtx (REG
, mode
, regno
);
9390 emit_move_insn (reg
,
9391 change_address (arguments
, mode
,
9392 plus_constant (XEXP (arguments
, 0),
9395 use_reg (&call_fusage
, reg
);
9396 size
+= GET_MODE_SIZE (mode
);
9399 /* Restore the structure value address unless this is passed as an
9400 "invisible" first argument. */
9401 size
= GET_MODE_SIZE (Pmode
);
9402 if (struct_value_rtx
)
9404 rtx value
= gen_reg_rtx (Pmode
);
9405 emit_move_insn (value
,
9406 change_address (arguments
, Pmode
,
9407 plus_constant (XEXP (arguments
, 0),
9409 emit_move_insn (struct_value_rtx
, value
);
9410 if (GET_CODE (struct_value_rtx
) == REG
)
9411 use_reg (&call_fusage
, struct_value_rtx
);
9412 size
+= GET_MODE_SIZE (Pmode
);
9415 /* All arguments and registers used for the call are set up by now! */
9416 function
= prepare_call_address (function
, NULL_TREE
, &call_fusage
, 0);
9418 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9419 and we don't want to load it into a register as an optimization,
9420 because prepare_call_address already did it if it should be done. */
9421 if (GET_CODE (function
) != SYMBOL_REF
)
9422 function
= memory_address (FUNCTION_MODE
, function
);
9424 /* Generate the actual call instruction and save the return value. */
9425 #ifdef HAVE_untyped_call
9426 if (HAVE_untyped_call
)
9427 emit_call_insn (gen_untyped_call (gen_rtx (MEM
, FUNCTION_MODE
, function
),
9428 result
, result_vector (1, result
)));
9431 #ifdef HAVE_call_value
9432 if (HAVE_call_value
)
9436 /* Locate the unique return register. It is not possible to
9437 express a call that sets more than one return register using
9438 call_value; use untyped_call for that. In fact, untyped_call
9439 only needs to save the return registers in the given block. */
9440 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9441 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
9444 abort (); /* HAVE_untyped_call required. */
9445 valreg
= gen_rtx (REG
, mode
, regno
);
9448 emit_call_insn (gen_call_value (valreg
,
9449 gen_rtx (MEM
, FUNCTION_MODE
, function
),
9450 const0_rtx
, NULL_RTX
, const0_rtx
));
9452 emit_move_insn (change_address (result
, GET_MODE (valreg
),
9460 /* Find the CALL insn we just emitted. */
9461 for (call_insn
= get_last_insn ();
9462 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
9463 call_insn
= PREV_INSN (call_insn
))
9469 /* Put the register usage information on the CALL. If there is already
9470 some usage information, put ours at the end. */
9471 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
9475 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
9476 link
= XEXP (link
, 1))
9479 XEXP (link
, 1) = call_fusage
;
9482 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
9484 /* Restore the stack. */
9485 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
9487 /* Return the address of the result block. */
9488 return copy_addr_to_reg (XEXP (result
, 0));
/* Expander for __builtin_return (RESULT): reload the return-value
   registers from the block at RESULT (saved earlier by
   __builtin_apply), emit USEs so the values stay live, and return
   from the current function via expand_null_return / untyped_return.
   NOTE(review): the return type line, braces, `size = 0`
   initialization, and several interior lines (e.g. 9507, 9509-9513,
   9526-9527, 9531, 9533-9534, 9539) are missing from this
   extraction; some plus_constant offsets are absent.  */
9491 /* Perform an untyped return. */
9494 expand_builtin_return (result
)
9497 int size
, align
, regno
;
9498 enum machine_mode mode
;
9500 rtx call_fusage
= 0;
9502 apply_result_size ();
9503 result
= gen_rtx (MEM
, BLKmode
, result
);
9505 #ifdef HAVE_untyped_return
9506 if (HAVE_untyped_return
)
9508 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
9514 /* Restore the return value and note that each value is used. */
9516 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9517 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
9519 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9520 if (size
% align
!= 0)
9521 size
= CEIL (size
, align
) * align
;
9522 reg
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9523 emit_move_insn (reg
,
9524 change_address (result
, mode
,
9525 plus_constant (XEXP (result
, 0),
/* Accumulate a USE of each restored register in call_fusage so the
   values are considered live at the return.  */
9528 push_to_sequence (call_fusage
);
9529 emit_insn (gen_rtx (USE
, VOIDmode
, reg
));
9530 call_fusage
= get_insns ();
9532 size
+= GET_MODE_SIZE (mode
);
9535 /* Put the USE insns before the return. */
9536 emit_insns (call_fusage
);
9538 /* Return whatever values was restored by jumping directly to the end
9540 expand_null_return ();
/* Expand a pre/post increment/decrement EXP to RTL and return the
   result rtx.  POST nonzero means post-inc/dec (return the old value);
   IGNORE nonzero means the result is unused.  Tries, in order:
   delegating to expand_assignment when OP0 is only a copy, queueing a
   single add/sub insn on the increment queue, and finally an explicit
   copy + expand_binop + store.
   NOTE(review): this extraction is missing many lines (the return
   type, braces, the `bad_subreg` declaration, `single_insn`
   computation, parts of the queued-MEM path, and the final returns —
   original line numbers jump at 9549-9551, 9563-9564, 9568-9570,
   9573, 9595-9601, 9606-9608, 9615-9620, 9636-9640, 9648-9650,
   9670-9671, 9683-9686, 9695-9696, 9706-9708, 9712-9713, 9723-9726,
   9742, 9744-9747).  Preserved byte-for-byte; only comments added.  */
9543 /* Expand code for a post- or pre- increment or decrement
9544 and return the RTX for the result.
9545 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9548 expand_increment (exp
, post
, ignore
)
9552 register rtx op0
, op1
;
9553 register rtx temp
, value
;
9554 register tree incremented
= TREE_OPERAND (exp
, 0);
9555 optab this_optab
= add_optab
;
9557 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9558 int op0_is_copy
= 0;
9559 int single_insn
= 0;
9560 /* 1 means we can't store into OP0 directly,
9561 because it is a subreg narrower than a word,
9562 and we don't dare clobber the rest of the word. */
9565 if (output_bytecode
)
9567 bc_expand_expr (exp
);
9571 /* Stabilize any component ref that might need to be
9572 evaluated more than once below. */
9574 || TREE_CODE (incremented
) == BIT_FIELD_REF
9575 || (TREE_CODE (incremented
) == COMPONENT_REF
9576 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9577 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9578 incremented
= stabilize_reference (incremented
);
9579 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9580 ones into save exprs so that they don't accidentally get evaluated
9581 more than once by the code below. */
9582 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9583 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9584 incremented
= save_expr (incremented
);
9586 /* Compute the operands as RTX.
9587 Note whether OP0 is the actual lvalue or a copy of it:
9588 I believe it is a copy iff it is a register or subreg
9589 and insns were generated in computing it. */
9591 temp
= get_last_insn ();
9592 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9594 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9595 in place but instead must do sign- or zero-extension during assignment,
9596 so we copy it into a new register and let the code below use it as
9599 Note that we can safely modify this SUBREG since it is know not to be
9600 shared (it was made by the expand_expr call above). */
9602 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9605 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9609 else if (GET_CODE (op0
) == SUBREG
9610 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9612 /* We cannot increment this SUBREG in place. If we are
9613 post-incrementing, get a copy of the old value. Otherwise,
9614 just mark that we cannot increment in place. */
9616 op0
= copy_to_reg (op0
);
9621 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9622 && temp
!= get_last_insn ());
9623 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9625 /* Decide whether incrementing or decrementing. */
9626 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9627 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9628 this_optab
= sub_optab
;
9630 /* Convert decrement by a constant into a negative increment. */
9631 if (this_optab
== sub_optab
9632 && GET_CODE (op1
) == CONST_INT
)
9634 op1
= GEN_INT (- INTVAL (op1
));
9635 this_optab
= add_optab
;
9638 /* For a preincrement, see if we can do this with a single instruction. */
9641 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9642 if (icode
!= (int) CODE_FOR_nothing
9643 /* Make sure that OP0 is valid for operands 0 and 1
9644 of the insn we want to queue. */
9645 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9646 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
)
9647 && (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9651 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9652 then we cannot just increment OP0. We must therefore contrive to
9653 increment the original value. Then, for postincrement, we can return
9654 OP0 since it is a copy of the old value. For preincrement, expand here
9655 unless we can do it with a single insn.
9657 Likewise if storing directly into OP0 would clobber high bits
9658 we need to preserve (bad_subreg). */
9659 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9661 /* This is the easiest way to increment the value wherever it is.
9662 Problems with multiple evaluation of INCREMENTED are prevented
9663 because either (1) it is a component_ref or preincrement,
9664 in which case it was stabilized above, or (2) it is an array_ref
9665 with constant index in an array in a register, which is
9666 safe to reevaluate. */
9667 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9668 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9669 ? MINUS_EXPR
: PLUS_EXPR
),
9672 TREE_OPERAND (exp
, 1));
9674 while (TREE_CODE (incremented
) == NOP_EXPR
9675 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9677 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9678 incremented
= TREE_OPERAND (incremented
, 0);
9681 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9682 return post
? op0
: temp
;
9687 /* We have a true reference to the value in OP0.
9688 If there is an insn to add or subtract in this mode, queue it.
9689 Queueing the increment insn avoids the register shuffling
9690 that often results if we must increment now and first save
9691 the old value for subsequent use. */
9693 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9694 op0
= stabilize (op0
);
9697 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9698 if (icode
!= (int) CODE_FOR_nothing
9699 /* Make sure that OP0 is valid for operands 0 and 1
9700 of the insn we want to queue. */
9701 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9702 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
))
9704 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9705 op1
= force_reg (mode
, op1
);
9707 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9709 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9711 rtx addr
= force_reg (Pmode
, XEXP (op0
, 0));
9714 op0
= change_address (op0
, VOIDmode
, addr
);
9715 temp
= force_reg (GET_MODE (op0
), op0
);
9716 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9717 op1
= force_reg (mode
, op1
);
9719 /* The increment queue is LIFO, thus we have to `queue'
9720 the instructions in reverse order. */
9721 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9722 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9727 /* Preincrement, or we can't increment with one simple insn. */
9729 /* Save a copy of the value before inc or dec, to return it later. */
9730 temp
= value
= copy_to_reg (op0
);
9732 /* Arrange to return the incremented value. */
9733 /* Copy the rtx because expand_binop will protect from the queue,
9734 and the results of that would be invalid for us to return
9735 if our caller does emit_queue before using our result. */
9736 temp
= copy_rtx (value
= op0
);
9738 /* Increment however we can. */
9739 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9740 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9741 /* Make sure the value is stored into OP0. */
9743 emit_move_insn (op0
, op1
);
/* Recursively pre-expand CALL_EXPRs inside EXP (innermost first),
   storing each call's rtx in CALL_EXPR_RTL.  Skips expression classes
   with sequence points and bails out of several cases via early
   returns that are missing from this extraction.
   NOTE(review): the return type, parameter declaration, braces,
   `case CALL_EXPR:` label, and several `return;` lines (original
   lines 9752-9756, 9759, 9761-9762, 9764, 9766-9767, 9769-9770,
   9778, 9780-9781, 9783-9786, 9795-9798, 9801-9807, 9811, 9814,
   9816-9818) are absent; line numbers jump accordingly.  */
9748 /* Expand all function calls contained within EXP, innermost ones first.
9749 But don't look within expressions that have sequence points.
9750 For each CALL_EXPR, record the rtx for its value
9751 in the CALL_EXPR_RTL field. */
9754 preexpand_calls (exp
)
9757 register int nops
, i
;
9758 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
9760 if (! do_preexpand_calls
)
9763 /* Only expressions and references can contain calls. */
9765 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
9768 switch (TREE_CODE (exp
))
9771 /* Do nothing if already expanded. */
9772 if (CALL_EXPR_RTL (exp
) != 0
9773 /* Do nothing if the call returns a variable-sized object. */
9774 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp
))) != INTEGER_CST
9775 /* Do nothing to built-in functions. */
9776 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9777 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9779 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
9782 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9787 case TRUTH_ANDIF_EXPR
:
9788 case TRUTH_ORIF_EXPR
:
9789 /* If we find one of these, then we can be sure
9790 the adjust will be done for it (since it makes jumps).
9791 Do it now, so that if this is inside an argument
9792 of a function, we don't get the stack adjustment
9793 after some other args have already been pushed. */
9794 do_pending_stack_adjust ();
9799 case WITH_CLEANUP_EXPR
:
9800 case CLEANUP_POINT_EXPR
:
9804 if (SAVE_EXPR_RTL (exp
) != 0)
/* Recurse into each operand that can itself contain a call.  */
9808 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
9809 for (i
= 0; i
< nops
; i
++)
9810 if (TREE_OPERAND (exp
, i
) != 0)
9812 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9813 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
9815 preexpand_calls (TREE_OPERAND (exp
, i
));
/* Reset the pending stack adjustment at the start of a function.
   NOTE(review): return type and braces missing from this extraction.  */
9819 /* At the start of a function, record that we have no previously-pushed
9820 arguments waiting to be popped. */
9823 init_pending_stack_adjust ()
9825 pending_stack_adjust
= 0;
/* Drop the pending stack adjustment at function exit when the target's
   epilogue ignores the stack pointer and the function won't be inlined.
   NOTE(review): return type, braces, the start of the `if (optimize > 0`
   condition (original line 9835), and the closing `#endif` are missing
   from this extraction.  */
9828 /* When exiting from function, if safe, clear out any pending stack adjust
9829 so the adjustment won't get done. */
9832 clear_pending_stack_adjust ()
9834 #ifdef EXIT_IGNORE_STACK
9836 && ! flag_omit_frame_pointer
&& EXIT_IGNORE_STACK
9837 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
9838 && ! flag_inline_functions
)
9839 pending_stack_adjust
= 0;
/* Emit any deferred stack-pointer adjustment now, unless adjustments
   are currently inhibited (inhibit_defer_pop nonzero).
   NOTE(review): return type and braces missing from this extraction.  */
9843 /* Pop any previously-pushed arguments that have not been popped yet. */
9846 do_pending_stack_adjust ()
9848 if (inhibit_defer_pop
== 0)
9850 if (pending_stack_adjust
!= 0)
9851 adjust_stack (GEN_INT (pending_stack_adjust
));
9852 pending_stack_adjust
= 0;
/* Detach the cleanups recorded since OLD_CLEANUPS from
   cleanups_this_call (closing their EH regions), and fold them into a
   single expression (chained COMPOUND_EXPRs) which is returned for
   later conditional expansion.  Returns NULL_TREE when there were none.
   NOTE(review): return type, parameter declaration, braces, the
   `if (last)` guard, the loop header around the COMPOUND_EXPR build,
   and the pop_obstacks call (original lines 9858-9862, 9866, 9868,
   9872-9875, 9878, 9881, 9885-9888, 9891, 9893, 9895-9899) are
   missing from this extraction.  */
9856 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9857 Returns the cleanups to be performed. */
9860 defer_cleanups_to (old_cleanups
)
9863 tree new_cleanups
= NULL_TREE
;
9864 tree cleanups
= cleanups_this_call
;
9865 tree last
= NULL_TREE
;
9867 while (cleanups_this_call
!= old_cleanups
)
9869 expand_eh_region_end (TREE_VALUE (cleanups_this_call
));
9870 last
= cleanups_this_call
;
9871 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
9876 /* Remove the list from the chain of cleanups. */
9877 TREE_CHAIN (last
) = NULL_TREE
;
9879 /* reverse them so that we can build them in the right order. */
9880 cleanups
= nreverse (cleanups
);
9882 /* All cleanups must be on the function_obstack. */
9883 push_obstacks_nochange ();
9884 resume_temporary_allocation ();
9889 new_cleanups
= build (COMPOUND_EXPR
, TREE_TYPE (new_cleanups
),
9890 TREE_VALUE (cleanups
), new_cleanups
);
9892 new_cleanups
= TREE_VALUE (cleanups
);
9894 cleanups
= TREE_CHAIN (cleanups
);
9900 return new_cleanups
;
/* Expand (emit code for) every cleanup recorded since OLD_CLEANUPS,
   closing each one's EH region and popping it off cleanups_this_call.
   NOTE(review): return type, parameter declaration, and braces
   (original lines 9905-9909, 9911, 9915-9916) are missing from this
   extraction.  */
9903 /* Expand all cleanups up to OLD_CLEANUPS.
9904 Needed here, and also for language-dependent calls. */
9907 expand_cleanups_to (old_cleanups
)
9910 while (cleanups_this_call
!= old_cleanups
)
9912 expand_eh_region_end (TREE_VALUE (cleanups_this_call
));
9913 expand_expr (TREE_VALUE (cleanups_this_call
), const0_rtx
, VOIDmode
, 0);
9914 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
/* Jump to LABEL when EXP evaluates to zero — a thin wrapper passing
   LABEL as do_jump's if_false_label.
   NOTE(review): return type, parameter declarations, and braces are
   missing from this extraction (original lines 9922-9924, 9926-9928,
   9930).  */
9918 /* Expand conditional expressions. */
9920 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9921 LABEL is an rtx of code CODE_LABEL, in this function and all the
9925 jumpifnot (exp
, label
)
9929 do_jump (exp
, label
, NULL_RTX
);
/* Jump to LABEL when EXP evaluates to nonzero — LABEL is passed as
   do_jump's if_true_label.
   NOTE(review): the function header is missing from this extraction
   (original lines 9933-9938) — presumably `jumpif (exp, label)` by
   symmetry with jumpifnot above; confirm against the full source.  */
9932 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9939 do_jump (exp
, NULL_RTX
, label
);
9942 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9943 the result is zero, or IF_TRUE_LABEL if the result is one.
9944 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9945 meaning fall through in that case.
9947 do_jump always does any pending stack adjust except when it does not
9948 actually perform a jump. An example where there is no jump
9949 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9951 This function is responsible for optimizing cases such as
9952 &&, || and comparison operators in EXP. */
9955 do_jump (exp
, if_false_label
, if_true_label
)
9957 rtx if_false_label
, if_true_label
;
9959 register enum tree_code code
= TREE_CODE (exp
);
9960 /* Some cases need to create a label to jump to
9961 in order to properly fall through.
9962 These cases set DROP_THROUGH_LABEL nonzero. */
9963 rtx drop_through_label
= 0;
9968 enum machine_mode mode
;
9978 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9984 /* This is not true with #pragma weak */
9986 /* The address of something can never be zero. */
9988 emit_jump (if_true_label
);
9993 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9994 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9995 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9998 /* If we are narrowing the operand, we have to do the compare in the
10000 if ((TYPE_PRECISION (TREE_TYPE (exp
))
10001 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10003 case NON_LVALUE_EXPR
:
10004 case REFERENCE_EXPR
:
10009 /* These cannot change zero->non-zero or vice versa. */
10010 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10014 /* This is never less insns than evaluating the PLUS_EXPR followed by
10015 a test and can be longer if the test is eliminated. */
10017 /* Reduce to minus. */
10018 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
10019 TREE_OPERAND (exp
, 0),
10020 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
10021 TREE_OPERAND (exp
, 1))));
10022 /* Process as MINUS. */
10026 /* Non-zero iff operands of minus differ. */
10027 comparison
= compare (build (NE_EXPR
, TREE_TYPE (exp
),
10028 TREE_OPERAND (exp
, 0),
10029 TREE_OPERAND (exp
, 1)),
10034 /* If we are AND'ing with a small constant, do this comparison in the
10035 smallest type that fits. If the machine doesn't have comparisons
10036 that small, it will be converted back to the wider comparison.
10037 This helps if we are testing the sign bit of a narrower object.
10038 combine can't do this for us because it can't know whether a
10039 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10041 if (! SLOW_BYTE_ACCESS
10042 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
10043 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
10044 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
10045 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
10046 && (type
= type_for_mode (mode
, 1)) != 0
10047 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
10048 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
10049 != CODE_FOR_nothing
))
10051 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
10056 case TRUTH_NOT_EXPR
:
10057 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10060 case TRUTH_ANDIF_EXPR
:
10063 tree cleanups
, old_cleanups
;
10065 if (if_false_label
== 0)
10066 if_false_label
= drop_through_label
= gen_label_rtx ();
10068 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
10069 seq1
= get_insns ();
10072 old_cleanups
= cleanups_this_call
;
10074 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10075 seq2
= get_insns ();
10076 cleanups
= defer_cleanups_to (old_cleanups
);
10081 rtx flag
= gen_reg_rtx (word_mode
);
10085 /* Flag cleanups as not needed. */
10086 emit_move_insn (flag
, const0_rtx
);
10089 /* Flag cleanups as needed. */
10090 emit_move_insn (flag
, const1_rtx
);
10093 /* All cleanups must be on the function_obstack. */
10094 push_obstacks_nochange ();
10095 resume_temporary_allocation ();
10097 /* convert flag, which is an rtx, into a tree. */
10098 cond
= make_node (RTL_EXPR
);
10099 TREE_TYPE (cond
) = integer_type_node
;
10100 RTL_EXPR_RTL (cond
) = flag
;
10101 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10102 cond
= save_expr (cond
);
10104 new_cleanups
= build (COND_EXPR
, void_type_node
,
10105 truthvalue_conversion (cond
),
10106 cleanups
, integer_zero_node
);
10107 new_cleanups
= fold (new_cleanups
);
10111 /* Now add in the conditionalized cleanups. */
10113 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10114 expand_eh_region_start ();
10124 case TRUTH_ORIF_EXPR
:
10127 tree cleanups
, old_cleanups
;
10129 if (if_true_label
== 0)
10130 if_true_label
= drop_through_label
= gen_label_rtx ();
10132 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
10133 seq1
= get_insns ();
10136 old_cleanups
= cleanups_this_call
;
10138 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10139 seq2
= get_insns ();
10140 cleanups
= defer_cleanups_to (old_cleanups
);
10145 rtx flag
= gen_reg_rtx (word_mode
);
10149 /* Flag cleanups as not needed. */
10150 emit_move_insn (flag
, const0_rtx
);
10153 /* Flag cleanups as needed. */
10154 emit_move_insn (flag
, const1_rtx
);
10157 /* All cleanups must be on the function_obstack. */
10158 push_obstacks_nochange ();
10159 resume_temporary_allocation ();
10161 /* convert flag, which is an rtx, into a tree. */
10162 cond
= make_node (RTL_EXPR
);
10163 TREE_TYPE (cond
) = integer_type_node
;
10164 RTL_EXPR_RTL (cond
) = flag
;
10165 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10166 cond
= save_expr (cond
);
10168 new_cleanups
= build (COND_EXPR
, void_type_node
,
10169 truthvalue_conversion (cond
),
10170 cleanups
, integer_zero_node
);
10171 new_cleanups
= fold (new_cleanups
);
10175 /* Now add in the conditionalized cleanups. */
10177 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10178 expand_eh_region_start ();
10188 case COMPOUND_EXPR
:
10189 push_temp_slots ();
10190 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
10191 preserve_temp_slots (NULL_RTX
);
10192 free_temp_slots ();
10195 do_pending_stack_adjust ();
10196 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10199 case COMPONENT_REF
:
10200 case BIT_FIELD_REF
:
10203 int bitsize
, bitpos
, unsignedp
;
10204 enum machine_mode mode
;
10210 /* Get description of this reference. We don't actually care
10211 about the underlying object here. */
10212 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
10213 &mode
, &unsignedp
, &volatilep
,
10216 type
= type_for_size (bitsize
, unsignedp
);
10217 if (! SLOW_BYTE_ACCESS
10218 && type
!= 0 && bitsize
>= 0
10219 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
10220 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
10221 != CODE_FOR_nothing
))
10223 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
10230 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10231 if (integer_onep (TREE_OPERAND (exp
, 1))
10232 && integer_zerop (TREE_OPERAND (exp
, 2)))
10233 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10235 else if (integer_zerop (TREE_OPERAND (exp
, 1))
10236 && integer_onep (TREE_OPERAND (exp
, 2)))
10237 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10242 tree cleanups_left_side
, cleanups_right_side
, old_cleanups
;
10244 register rtx label1
= gen_label_rtx ();
10245 drop_through_label
= gen_label_rtx ();
10247 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
10249 /* We need to save the cleanups for the lhs and rhs separately.
10250 Keep track of the cleanups seen before the lhs. */
10251 old_cleanups
= cleanups_this_call
;
10253 /* Now the THEN-expression. */
10254 do_jump (TREE_OPERAND (exp
, 1),
10255 if_false_label
? if_false_label
: drop_through_label
,
10256 if_true_label
? if_true_label
: drop_through_label
);
10257 /* In case the do_jump just above never jumps. */
10258 do_pending_stack_adjust ();
10259 emit_label (label1
);
10260 seq1
= get_insns ();
10261 /* Now grab the cleanups for the lhs. */
10262 cleanups_left_side
= defer_cleanups_to (old_cleanups
);
10265 /* And keep track of where we start before the rhs. */
10266 old_cleanups
= cleanups_this_call
;
10268 /* Now the ELSE-expression. */
10269 do_jump (TREE_OPERAND (exp
, 2),
10270 if_false_label
? if_false_label
: drop_through_label
,
10271 if_true_label
? if_true_label
: drop_through_label
);
10272 seq2
= get_insns ();
10273 /* Grab the cleanups for the rhs. */
10274 cleanups_right_side
= defer_cleanups_to (old_cleanups
);
10277 if (cleanups_left_side
|| cleanups_right_side
)
10279 /* Make the cleanups for the THEN and ELSE clauses
10280 conditional based on which half is executed. */
10281 rtx flag
= gen_reg_rtx (word_mode
);
10285 /* Set the flag to 0 so that we know we executed the lhs. */
10286 emit_move_insn (flag
, const0_rtx
);
10289 /* Set the flag to 1 so that we know we executed the rhs. */
10290 emit_move_insn (flag
, const1_rtx
);
10293 /* Make sure the cleanup lives on the function_obstack. */
10294 push_obstacks_nochange ();
10295 resume_temporary_allocation ();
10297 /* Now, build up a COND_EXPR that tests the value of the
10298 flag, and then either do the cleanups for the lhs or the
10300 cond
= make_node (RTL_EXPR
);
10301 TREE_TYPE (cond
) = integer_type_node
;
10302 RTL_EXPR_RTL (cond
) = flag
;
10303 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10304 cond
= save_expr (cond
);
10306 new_cleanups
= build (COND_EXPR
, void_type_node
,
10307 truthvalue_conversion (cond
),
10308 cleanups_right_side
, cleanups_left_side
);
10309 new_cleanups
= fold (new_cleanups
);
10313 /* Now add in the conditionalized cleanups. */
10315 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10316 expand_eh_region_start ();
10320 /* No cleanups were needed, so emit the two sequences
10330 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10332 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10333 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10334 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10335 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10338 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
10339 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10340 fold (build1 (REALPART_EXPR
,
10341 TREE_TYPE (inner_type
),
10342 TREE_OPERAND (exp
, 0))),
10343 fold (build1 (REALPART_EXPR
,
10344 TREE_TYPE (inner_type
),
10345 TREE_OPERAND (exp
, 1))))),
10346 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10347 fold (build1 (IMAGPART_EXPR
,
10348 TREE_TYPE (inner_type
),
10349 TREE_OPERAND (exp
, 0))),
10350 fold (build1 (IMAGPART_EXPR
,
10351 TREE_TYPE (inner_type
),
10352 TREE_OPERAND (exp
, 1))))))),
10353 if_false_label
, if_true_label
);
10354 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10355 && !can_compare_p (TYPE_MODE (inner_type
)))
10356 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
10358 comparison
= compare (exp
, EQ
, EQ
);
10364 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10366 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10367 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10368 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10369 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10372 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
10373 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10374 fold (build1 (REALPART_EXPR
,
10375 TREE_TYPE (inner_type
),
10376 TREE_OPERAND (exp
, 0))),
10377 fold (build1 (REALPART_EXPR
,
10378 TREE_TYPE (inner_type
),
10379 TREE_OPERAND (exp
, 1))))),
10380 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10381 fold (build1 (IMAGPART_EXPR
,
10382 TREE_TYPE (inner_type
),
10383 TREE_OPERAND (exp
, 0))),
10384 fold (build1 (IMAGPART_EXPR
,
10385 TREE_TYPE (inner_type
),
10386 TREE_OPERAND (exp
, 1))))))),
10387 if_false_label
, if_true_label
);
10388 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10389 && !can_compare_p (TYPE_MODE (inner_type
)))
10390 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
10392 comparison
= compare (exp
, NE
, NE
);
10397 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10399 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10400 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
10402 comparison
= compare (exp
, LT
, LTU
);
10406 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10408 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10409 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
10411 comparison
= compare (exp
, LE
, LEU
);
10415 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10417 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10418 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
10420 comparison
= compare (exp
, GT
, GTU
);
10424 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10426 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10427 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
10429 comparison
= compare (exp
, GE
, GEU
);
10434 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
10436 /* This is not needed any more and causes poor code since it causes
10437 comparisons and tests from non-SI objects to have different code
10439 /* Copy to register to avoid generating bad insns by cse
10440 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10441 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
10442 temp
= copy_to_reg (temp
);
10444 do_pending_stack_adjust ();
10445 if (GET_CODE (temp
) == CONST_INT
)
10446 comparison
= (temp
== const0_rtx
? const0_rtx
: const_true_rtx
);
10447 else if (GET_CODE (temp
) == LABEL_REF
)
10448 comparison
= const_true_rtx
;
10449 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
10450 && !can_compare_p (GET_MODE (temp
)))
10451 /* Note swapping the labels gives us not-equal. */
10452 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
10453 else if (GET_MODE (temp
) != VOIDmode
)
10454 comparison
= compare_from_rtx (temp
, CONST0_RTX (GET_MODE (temp
)),
10455 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10456 GET_MODE (temp
), NULL_RTX
, 0);
10461 /* Do any postincrements in the expression that was tested. */
10464 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10465 straight into a conditional jump instruction as the jump condition.
10466 Otherwise, all the work has been done already. */
10468 if (comparison
== const_true_rtx
)
10471 emit_jump (if_true_label
);
10473 else if (comparison
== const0_rtx
)
10475 if (if_false_label
)
10476 emit_jump (if_false_label
);
10478 else if (comparison
)
10479 do_jump_for_compare (comparison
, if_false_label
, if_true_label
);
10481 if (drop_through_label
)
10483 /* If do_jump produces code that might be jumped around,
10484 do any stack adjusts from that code, before the place
10485 where control merges in. */
10486 do_pending_stack_adjust ();
10487 emit_label (drop_through_label
);
10491 /* Given a comparison expression EXP for values too wide to be compared
10492 with one insn, test the comparison and jump to the appropriate label.
10493 The code of EXP is ignored; we always test GT if SWAP is 0,
10494 and LT if SWAP is 1. */
10497 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
10500 rtx if_false_label
, if_true_label
;
10502 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
10503 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
10504 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10505 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10506 rtx drop_through_label
= 0;
10507 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10510 if (! if_true_label
|| ! if_false_label
)
10511 drop_through_label
= gen_label_rtx ();
10512 if (! if_true_label
)
10513 if_true_label
= drop_through_label
;
10514 if (! if_false_label
)
10515 if_false_label
= drop_through_label
;
10517 /* Compare a word at a time, high order first. */
10518 for (i
= 0; i
< nwords
; i
++)
10521 rtx op0_word
, op1_word
;
10523 if (WORDS_BIG_ENDIAN
)
10525 op0_word
= operand_subword_force (op0
, i
, mode
);
10526 op1_word
= operand_subword_force (op1
, i
, mode
);
10530 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10531 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10534 /* All but high-order word must be compared as unsigned. */
10535 comp
= compare_from_rtx (op0_word
, op1_word
,
10536 (unsignedp
|| i
> 0) ? GTU
: GT
,
10537 unsignedp
, word_mode
, NULL_RTX
, 0);
10538 if (comp
== const_true_rtx
)
10539 emit_jump (if_true_label
);
10540 else if (comp
!= const0_rtx
)
10541 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10543 /* Consider lower words only if these are equal. */
10544 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
10546 if (comp
== const_true_rtx
)
10547 emit_jump (if_false_label
);
10548 else if (comp
!= const0_rtx
)
10549 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
10552 if (if_false_label
)
10553 emit_jump (if_false_label
);
10554 if (drop_through_label
)
10555 emit_label (drop_through_label
);
10558 /* Compare OP0 with OP1, word at a time, in mode MODE.
10559 UNSIGNEDP says to do unsigned comparison.
10560 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10563 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
10564 enum machine_mode mode
;
10567 rtx if_false_label
, if_true_label
;
10569 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10570 rtx drop_through_label
= 0;
10573 if (! if_true_label
|| ! if_false_label
)
10574 drop_through_label
= gen_label_rtx ();
10575 if (! if_true_label
)
10576 if_true_label
= drop_through_label
;
10577 if (! if_false_label
)
10578 if_false_label
= drop_through_label
;
10580 /* Compare a word at a time, high order first. */
10581 for (i
= 0; i
< nwords
; i
++)
10584 rtx op0_word
, op1_word
;
10586 if (WORDS_BIG_ENDIAN
)
10588 op0_word
= operand_subword_force (op0
, i
, mode
);
10589 op1_word
= operand_subword_force (op1
, i
, mode
);
10593 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10594 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10597 /* All but high-order word must be compared as unsigned. */
10598 comp
= compare_from_rtx (op0_word
, op1_word
,
10599 (unsignedp
|| i
> 0) ? GTU
: GT
,
10600 unsignedp
, word_mode
, NULL_RTX
, 0);
10601 if (comp
== const_true_rtx
)
10602 emit_jump (if_true_label
);
10603 else if (comp
!= const0_rtx
)
10604 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10606 /* Consider lower words only if these are equal. */
10607 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
10609 if (comp
== const_true_rtx
)
10610 emit_jump (if_false_label
);
10611 else if (comp
!= const0_rtx
)
10612 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
10615 if (if_false_label
)
10616 emit_jump (if_false_label
);
10617 if (drop_through_label
)
10618 emit_label (drop_through_label
);
10621 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10622 with one insn, test the comparison and jump to the appropriate label. */
10625 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
10627 rtx if_false_label
, if_true_label
;
10629 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10630 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10631 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10632 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10634 rtx drop_through_label
= 0;
10636 if (! if_false_label
)
10637 drop_through_label
= if_false_label
= gen_label_rtx ();
10639 for (i
= 0; i
< nwords
; i
++)
10641 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
, mode
),
10642 operand_subword_force (op1
, i
, mode
),
10643 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10644 word_mode
, NULL_RTX
, 0);
10645 if (comp
== const_true_rtx
)
10646 emit_jump (if_false_label
);
10647 else if (comp
!= const0_rtx
)
10648 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
10652 emit_jump (if_true_label
);
10653 if (drop_through_label
)
10654 emit_label (drop_through_label
);
10657 /* Jump according to whether OP0 is 0.
10658 We assume that OP0 has an integer mode that is too wide
10659 for the available compare insns. */
10662 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
10664 rtx if_false_label
, if_true_label
;
10666 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
10668 rtx drop_through_label
= 0;
10670 if (! if_false_label
)
10671 drop_through_label
= if_false_label
= gen_label_rtx ();
10673 for (i
= 0; i
< nwords
; i
++)
10675 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
,
10677 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0);
10678 if (comp
== const_true_rtx
)
10679 emit_jump (if_false_label
);
10680 else if (comp
!= const0_rtx
)
10681 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
10685 emit_jump (if_true_label
);
10686 if (drop_through_label
)
10687 emit_label (drop_through_label
);
10690 /* Given a comparison expression in rtl form, output conditional branches to
10691 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10694 do_jump_for_compare (comparison
, if_false_label
, if_true_label
)
10695 rtx comparison
, if_false_label
, if_true_label
;
10699 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10700 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_true_label
));
10704 if (if_false_label
)
10705 emit_jump (if_false_label
);
10707 else if (if_false_label
)
10710 rtx prev
= get_last_insn ();
10713 /* Output the branch with the opposite condition. Then try to invert
10714 what is generated. If more than one insn is a branch, or if the
10715 branch is not the last insn written, abort. If we can't invert
10716 the branch, emit make a true label, redirect this jump to that,
10717 emit a jump to the false label and define the true label. */
10719 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10720 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)])(if_false_label
));
10724 /* Here we get the first insn that was just emitted. It used to be the
10725 case that, on some machines, emitting the branch would discard
10726 the previous compare insn and emit a replacement. This isn't
10727 done anymore, but abort if we see that PREV is deleted. */
10730 insn
= get_insns ();
10731 else if (INSN_DELETED_P (prev
))
10734 insn
= NEXT_INSN (prev
);
10736 for (; insn
; insn
= NEXT_INSN (insn
))
10737 if (GET_CODE (insn
) == JUMP_INSN
)
10744 if (branch
!= get_last_insn ())
10747 JUMP_LABEL (branch
) = if_false_label
;
10748 if (! invert_jump (branch
, if_false_label
))
10750 if_true_label
= gen_label_rtx ();
10751 redirect_jump (branch
, if_true_label
);
10752 emit_jump (if_false_label
);
10753 emit_label (if_true_label
);
10758 /* Generate code for a comparison expression EXP
10759 (including code to compute the values to be compared)
10760 and set (CC0) according to the result.
10761 SIGNED_CODE should be the rtx operation for this comparison for
10762 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10764 We force a stack adjustment unless there are currently
10765 things pushed on the stack that aren't yet used. */
10768 compare (exp
, signed_code
, unsigned_code
)
10770 enum rtx_code signed_code
, unsigned_code
;
10773 = expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10775 = expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10776 register tree type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10777 register enum machine_mode mode
= TYPE_MODE (type
);
10778 int unsignedp
= TREE_UNSIGNED (type
);
10779 enum rtx_code code
= unsignedp
? unsigned_code
: signed_code
;
10781 #ifdef HAVE_canonicalize_funcptr_for_compare
10782 /* If function pointers need to be "canonicalized" before they can
10783 be reliably compared, then canonicalize them. */
10784 if (HAVE_canonicalize_funcptr_for_compare
10785 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10786 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10789 rtx new_op0
= gen_reg_rtx (mode
);
10791 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
10795 if (HAVE_canonicalize_funcptr_for_compare
10796 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10797 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10800 rtx new_op1
= gen_reg_rtx (mode
);
10802 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
10807 return compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
,
10809 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
10810 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
10813 /* Like compare but expects the values to compare as two rtx's.
10814 The decision as to signed or unsigned comparison must be made by the caller.
10816 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10819 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10820 size of MODE should be used. */
10823 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
10824 register rtx op0
, op1
;
10825 enum rtx_code code
;
10827 enum machine_mode mode
;
10833 /* If one operand is constant, make it the second one. Only do this
10834 if the other operand is not constant as well. */
10836 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
10837 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
10842 code
= swap_condition (code
);
10845 if (flag_force_mem
)
10847 op0
= force_not_mem (op0
);
10848 op1
= force_not_mem (op1
);
10851 do_pending_stack_adjust ();
10853 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
10854 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
10858 /* There's no need to do this now that combine.c can eliminate lots of
10859 sign extensions. This can be less efficient in certain cases on other
10862 /* If this is a signed equality comparison, we can do it as an
10863 unsigned comparison since zero-extension is cheaper than sign
10864 extension and comparisons with zero are done as unsigned. This is
10865 the case even on machines that can do fast sign extension, since
10866 zero-extension is easier to combine with other operations than
10867 sign-extension is. If we are comparing against a constant, we must
10868 convert it to what it would look like unsigned. */
10869 if ((code
== EQ
|| code
== NE
) && ! unsignedp
10870 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
10872 if (GET_CODE (op1
) == CONST_INT
10873 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
10874 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
10879 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
10881 return gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
10884 /* Generate code to calculate EXP using a store-flag instruction
10885 and return an rtx for the result. EXP is either a comparison
10886 or a TRUTH_NOT_EXPR whose operand is a comparison.
10888 If TARGET is nonzero, store the result there if convenient.
10890 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10893 Return zero if there is no suitable set-flag instruction
10894 available on this machine.
10896 Once expand_expr has been called on the arguments of the comparison,
10897 we are committed to doing the store flag, since it is not safe to
10898 re-evaluate the expression. We emit the store-flag insn by calling
10899 emit_store_flag, but only expand the arguments if we have a reason
10900 to believe that emit_store_flag will be successful. If we think that
10901 it will, but it isn't, we have to simulate the store-flag with a
10902 set/jump/set sequence. */
10905 do_store_flag (exp
, target
, mode
, only_cheap
)
10908 enum machine_mode mode
;
10911 enum rtx_code code
;
10912 tree arg0
, arg1
, type
;
10914 enum machine_mode operand_mode
;
10918 enum insn_code icode
;
10919 rtx subtarget
= target
;
10920 rtx result
, label
, pattern
, jump_pat
;
10922 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10923 result at the end. We can't simply invert the test since it would
10924 have already been inverted if it were valid. This case occurs for
10925 some floating-point comparisons. */
10927 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
10928 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
10930 arg0
= TREE_OPERAND (exp
, 0);
10931 arg1
= TREE_OPERAND (exp
, 1);
10932 type
= TREE_TYPE (arg0
);
10933 operand_mode
= TYPE_MODE (type
);
10934 unsignedp
= TREE_UNSIGNED (type
);
10936 /* We won't bother with BLKmode store-flag operations because it would mean
10937 passing a lot of information to emit_store_flag. */
10938 if (operand_mode
== BLKmode
)
10941 /* We won't bother with store-flag operations involving function pointers
10942 when function pointers must be canonicalized before comparisons. */
10943 #ifdef HAVE_canonicalize_funcptr_for_compare
10944 if (HAVE_canonicalize_funcptr_for_compare
10945 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10946 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10948 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10949 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10950 == FUNCTION_TYPE
))))
10957 /* Get the rtx comparison code to use. We know that EXP is a comparison
10958 operation of some type. Some comparisons against 1 and -1 can be
10959 converted to comparisons with zero. Do so here so that the tests
10960 below will be aware that we have a comparison with zero. These
10961 tests will not catch constants in the first operand, but constants
10962 are rarely passed as the first operand. */
10964 switch (TREE_CODE (exp
))
10973 if (integer_onep (arg1
))
10974 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
10976 code
= unsignedp
? LTU
: LT
;
10979 if (! unsignedp
&& integer_all_onesp (arg1
))
10980 arg1
= integer_zero_node
, code
= LT
;
10982 code
= unsignedp
? LEU
: LE
;
10985 if (! unsignedp
&& integer_all_onesp (arg1
))
10986 arg1
= integer_zero_node
, code
= GE
;
10988 code
= unsignedp
? GTU
: GT
;
10991 if (integer_onep (arg1
))
10992 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
10994 code
= unsignedp
? GEU
: GE
;
11000 /* Put a constant second. */
11001 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
11003 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
11004 code
= swap_condition (code
);
11007 /* If this is an equality or inequality test of a single bit, we can
11008 do this by shifting the bit being tested to the low-order bit and
11009 masking the result with the constant 1. If the condition was EQ,
11010 we xor it with 1. This does not require an scc insn and is faster
11011 than an scc insn even if we have it. */
11013 if ((code
== NE
|| code
== EQ
)
11014 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
11015 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11016 && TYPE_PRECISION (type
) <= HOST_BITS_PER_WIDE_INT
)
11018 tree inner
= TREE_OPERAND (arg0
, 0);
11023 tem
= INTVAL (expand_expr (TREE_OPERAND (arg0
, 1),
11024 NULL_RTX
, VOIDmode
, 0));
11025 /* In this case, immed_double_const will sign extend the value to make
11026 it look the same on the host and target. We must remove the
11027 sign-extension before calling exact_log2, since exact_log2 will
11028 fail for negative values. */
11029 if (BITS_PER_WORD
< HOST_BITS_PER_WIDE_INT
11030 && BITS_PER_WORD
== GET_MODE_BITSIZE (TYPE_MODE (type
)))
11031 /* We don't use the obvious constant shift to generate the mask,
11032 because that generates compiler warnings when BITS_PER_WORD is
11033 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11034 code is unreachable in that case. */
11035 tem
= tem
& GET_MODE_MASK (word_mode
);
11036 bitnum
= exact_log2 (tem
);
11038 /* If INNER is a right shift of a constant and it plus BITNUM does
11039 not overflow, adjust BITNUM and INNER. */
11041 if (TREE_CODE (inner
) == RSHIFT_EXPR
11042 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
11043 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
11044 && (bitnum
+ TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1))
11045 < TYPE_PRECISION (type
)))
11047 bitnum
+=TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
11048 inner
= TREE_OPERAND (inner
, 0);
11051 /* If we are going to be able to omit the AND below, we must do our
11052 operations as unsigned. If we must use the AND, we have a choice.
11053 Normally unsigned is faster, but for some machines signed is. */
11054 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
11055 #ifdef LOAD_EXTEND_OP
11056 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
11062 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
11063 || GET_MODE (subtarget
) != operand_mode
11064 || ! safe_from_p (subtarget
, inner
))
11067 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
11070 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
11071 size_int (bitnum
), subtarget
, ops_unsignedp
);
11073 if (GET_MODE (op0
) != mode
)
11074 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
11076 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
11077 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
11078 ops_unsignedp
, OPTAB_LIB_WIDEN
);
11080 /* Put the AND last so it can combine with more things. */
11081 if (bitnum
!= TYPE_PRECISION (type
) - 1)
11082 op0
= expand_and (op0
, const1_rtx
, subtarget
);
11087 /* Now see if we are likely to be able to do this. Return if not. */
11088 if (! can_compare_p (operand_mode
))
11090 icode
= setcc_gen_code
[(int) code
];
11091 if (icode
== CODE_FOR_nothing
11092 || (only_cheap
&& insn_operand_mode
[(int) icode
][0] != mode
))
11094 /* We can only do this if it is one of the special cases that
11095 can be handled without an scc insn. */
11096 if ((code
== LT
&& integer_zerop (arg1
))
11097 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
11099 else if (BRANCH_COST
>= 0
11100 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
11101 && TREE_CODE (type
) != REAL_TYPE
11102 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
11103 != CODE_FOR_nothing
)
11104 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
11105 != CODE_FOR_nothing
)))
11111 preexpand_calls (exp
);
11112 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
11113 || GET_MODE (subtarget
) != operand_mode
11114 || ! safe_from_p (subtarget
, arg1
))
11117 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
11118 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
11121 target
= gen_reg_rtx (mode
);
11123 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11124 because, if the emit_store_flag does anything it will succeed and
11125 OP0 and OP1 will not be used subsequently. */
11127 result
= emit_store_flag (target
, code
,
11128 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
11129 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
11130 operand_mode
, unsignedp
, 1);
11135 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
11136 result
, 0, OPTAB_LIB_WIDEN
);
11140 /* If this failed, we have to do this with set/compare/jump/set code. */
11141 if (GET_CODE (target
) != REG
11142 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
11143 target
= gen_reg_rtx (GET_MODE (target
));
11145 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
11146 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
11147 operand_mode
, NULL_RTX
, 0);
11148 if (GET_CODE (result
) == CONST_INT
)
11149 return (((result
== const0_rtx
&& ! invert
)
11150 || (result
!= const0_rtx
&& invert
))
11151 ? const0_rtx
: const1_rtx
);
11153 label
= gen_label_rtx ();
11154 if (bcc_gen_fctn
[(int) code
] == 0)
11157 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
11158 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
11159 emit_label (label
);
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
11244 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11245 to that value is on the top of the stack. The resulting type is TYPE, and
11246 the source declaration is DECL. */
11249 bc_load_memory (type
, decl
)
11252 enum bytecode_opcode opcode
;
11255 /* Bit fields are special. We only know about signed and
11256 unsigned ints, and enums. The latter are treated as
11257 signed integers. */
11259 if (DECL_BIT_FIELD (decl
))
11260 if (TREE_CODE (type
) == ENUMERAL_TYPE
11261 || TREE_CODE (type
) == INTEGER_TYPE
)
11262 opcode
= TREE_UNSIGNED (type
) ? zxloadBI
: sxloadBI
;
11266 /* See corresponding comment in bc_store_memory(). */
11267 if (TYPE_MODE (type
) == BLKmode
11268 || TYPE_MODE (type
) == VOIDmode
)
11271 opcode
= mode_to_load_map
[(int) TYPE_MODE (type
)];
11273 if (opcode
== neverneverland
)
11276 bc_emit_bytecode (opcode
);
11278 #ifdef DEBUG_PRINT_CODE
11279 fputc ('\n', stderr
);
11284 /* Store the contents of the second stack slot to the address in the
11285 top stack slot. DECL is the declaration of the destination and is used
11286 to determine whether we're dealing with a bitfield. */
11289 bc_store_memory (type
, decl
)
11292 enum bytecode_opcode opcode
;
11295 if (DECL_BIT_FIELD (decl
))
11297 if (TREE_CODE (type
) == ENUMERAL_TYPE
11298 || TREE_CODE (type
) == INTEGER_TYPE
)
11304 if (TYPE_MODE (type
) == BLKmode
)
11306 /* Copy structure. This expands to a block copy instruction, storeBLK.
11307 In addition to the arguments expected by the other store instructions,
11308 it also expects a type size (SImode) on top of the stack, which is the
11309 structure size in size units (usually bytes). The two first arguments
11310 are already on the stack; so we just put the size on level 1. For some
11311 other languages, the size may be variable, this is why we don't encode
11312 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11314 bc_expand_expr (TYPE_SIZE (type
));
11318 opcode
= mode_to_store_map
[(int) TYPE_MODE (type
)];
11320 if (opcode
== neverneverland
)
11323 bc_emit_bytecode (opcode
);
11325 #ifdef DEBUG_PRINT_CODE
11326 fputc ('\n', stderr
);
11331 /* Allocate local stack space sufficient to hold a value of the given
11332 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11333 integral power of 2. A special case is locals of type VOID, which
11334 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11335 remapped into the corresponding attribute of SI. */
11338 bc_allocate_local (size
, alignment
)
11339 int size
, alignment
;
11342 int byte_alignment
;
11347 /* Normalize size and alignment */
11349 size
= UNITS_PER_WORD
;
11351 if (alignment
< BITS_PER_UNIT
)
11352 byte_alignment
= 1 << (INT_ALIGN
- 1);
11355 byte_alignment
= alignment
/ BITS_PER_UNIT
;
11357 if (local_vars_size
& (byte_alignment
- 1))
11358 local_vars_size
+= byte_alignment
- (local_vars_size
& (byte_alignment
- 1));
11360 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11361 local_vars_size
+= size
;
11367 /* Allocate variable-sized local array. Variable-sized arrays are
11368 actually pointers to the address in memory where they are stored. */
11371 bc_allocate_variable_array (size
)
11375 const int ptralign
= (1 << (PTR_ALIGN
- 1));
11377 /* Align pointer */
11378 if (local_vars_size
& ptralign
)
11379 local_vars_size
+= ptralign
- (local_vars_size
& ptralign
);
11381 /* Note down local space needed: pointer to block; also return
11384 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11385 local_vars_size
+= POINTER_SIZE
/ BITS_PER_UNIT
;
11390 /* Push the machine address for the given external variable offset. */
11393 bc_load_externaddr (externaddr
)
11396 bc_emit_bytecode (constP
);
11397 bc_emit_code_labelref (BYTECODE_LABEL (externaddr
),
11398 BYTECODE_BC_LABEL (externaddr
)->offset
);
11400 #ifdef DEBUG_PRINT_CODE
11401 fputc ('\n', stderr
);
11406 /* Like above, but expects an IDENTIFIER. */
11409 bc_load_externaddr_id (id
, offset
)
11413 if (!IDENTIFIER_POINTER (id
))
11416 bc_emit_bytecode (constP
);
11417 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id
)), offset
);
11419 #ifdef DEBUG_PRINT_CODE
11420 fputc ('\n', stderr
);
11425 /* Push the machine address for the given local variable offset. */
11428 bc_load_localaddr (localaddr
)
11431 bc_emit_instruction (localP
, (HOST_WIDE_INT
) BYTECODE_BC_LABEL (localaddr
)->offset
);
11435 /* Push the machine address for the given parameter offset.
11436 NOTE: offset is in bits. */
11439 bc_load_parmaddr (parmaddr
)
11442 bc_emit_instruction (argP
, ((HOST_WIDE_INT
) BYTECODE_BC_LABEL (parmaddr
)->offset
11447 /* Convert a[i] into *(a + i). */
11450 bc_canonicalize_array_ref (exp
)
11453 tree type
= TREE_TYPE (exp
);
11454 tree array_adr
= build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
),
11455 TREE_OPERAND (exp
, 0));
11456 tree index
= TREE_OPERAND (exp
, 1);
11459 /* Convert the integer argument to a type the same size as a pointer
11460 so the multiply won't overflow spuriously. */
11462 if (TYPE_PRECISION (TREE_TYPE (index
)) != POINTER_SIZE
)
11463 index
= convert (type_for_size (POINTER_SIZE
, 0), index
);
11465 /* The array address isn't volatile even if the array is.
11466 (Of course this isn't terribly relevant since the bytecode
11467 translator treats nearly everything as volatile anyway.) */
11468 TREE_THIS_VOLATILE (array_adr
) = 0;
11470 return build1 (INDIRECT_REF
, type
,
11471 fold (build (PLUS_EXPR
,
11472 TYPE_POINTER_TO (type
),
11474 fold (build (MULT_EXPR
,
11475 TYPE_POINTER_TO (type
),
11477 size_in_bytes (type
))))));
11481 /* Load the address of the component referenced by the given
11482 COMPONENT_REF expression.
11484 Returns innermost lvalue. */
11487 bc_expand_component_address (exp
)
11491 enum machine_mode mode
;
11493 HOST_WIDE_INT SIval
;
11496 tem
= TREE_OPERAND (exp
, 1);
11497 mode
= DECL_MODE (tem
);
11500 /* Compute cumulative bit offset for nested component refs
11501 and array refs, and find the ultimate containing object. */
11503 for (tem
= exp
;; tem
= TREE_OPERAND (tem
, 0))
11505 if (TREE_CODE (tem
) == COMPONENT_REF
)
11506 bitpos
+= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem
, 1)));
11508 if (TREE_CODE (tem
) == ARRAY_REF
11509 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
11510 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
))) == INTEGER_CST
)
11512 bitpos
+= (TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1))
11513 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem
)))
11514 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11519 bc_expand_expr (tem
);
11522 /* For bitfields also push their offset and size */
11523 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
11524 bc_push_offset_and_size (bitpos
, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp
, 1)));
11526 if (SIval
= bitpos
/ BITS_PER_UNIT
)
11527 bc_emit_instruction (addconstPSI
, SIval
);
11529 return (TREE_OPERAND (exp
, 1));
11533 /* Emit code to push two SI constants */
11536 bc_push_offset_and_size (offset
, size
)
11537 HOST_WIDE_INT offset
, size
;
11539 bc_emit_instruction (constSI
, offset
);
11540 bc_emit_instruction (constSI
, size
);
11544 /* Emit byte code to push the address of the given lvalue expression to
11545 the stack. If it's a bit field, we also push offset and size info.
11547 Returns innermost component, which allows us to determine not only
11548 its type, but also whether it's a bitfield. */
11551 bc_expand_address (exp
)
11555 if (!exp
|| TREE_CODE (exp
) == ERROR_MARK
)
11559 switch (TREE_CODE (exp
))
11563 return (bc_expand_address (bc_canonicalize_array_ref (exp
)));
11565 case COMPONENT_REF
:
11567 return (bc_expand_component_address (exp
));
11571 bc_expand_expr (TREE_OPERAND (exp
, 0));
11573 /* For variable-sized types: retrieve pointer. Sometimes the
11574 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11575 also make sure we have an operand, just in case... */
11577 if (TREE_OPERAND (exp
, 0)
11578 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
11579 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))) != INTEGER_CST
)
11580 bc_emit_instruction (loadP
);
11582 /* If packed, also return offset and size */
11583 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 0)))
11585 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 0))),
11586 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp
, 0))));
11588 return (TREE_OPERAND (exp
, 0));
11590 case FUNCTION_DECL
:
11592 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11593 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
11598 bc_load_parmaddr (DECL_RTL (exp
));
11600 /* For variable-sized types: retrieve pointer */
11601 if (TYPE_SIZE (TREE_TYPE (exp
))
11602 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11603 bc_emit_instruction (loadP
);
11605 /* If packed, also return offset and size */
11606 if (DECL_BIT_FIELD (exp
))
11607 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11608 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
11614 bc_emit_instruction (returnP
);
11620 if (BYTECODE_LABEL (DECL_RTL (exp
)))
11621 bc_load_externaddr (DECL_RTL (exp
));
11624 if (DECL_EXTERNAL (exp
))
11625 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11626 (BYTECODE_BC_LABEL (DECL_RTL (exp
)))->offset
);
11628 bc_load_localaddr (DECL_RTL (exp
));
11630 /* For variable-sized types: retrieve pointer */
11631 if (TYPE_SIZE (TREE_TYPE (exp
))
11632 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11633 bc_emit_instruction (loadP
);
11635 /* If packed, also return offset and size */
11636 if (DECL_BIT_FIELD (exp
))
11637 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11638 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
11646 bc_emit_bytecode (constP
);
11647 r
= output_constant_def (exp
);
11648 bc_emit_code_labelref (BYTECODE_LABEL (r
), BYTECODE_BC_LABEL (r
)->offset
);
11650 #ifdef DEBUG_PRINT_CODE
11651 fputc ('\n', stderr
);
11662 /* Most lvalues don't have components. */
11667 /* Emit a type code to be used by the runtime support in handling
11668 parameter passing. The type code consists of the machine mode
11669 plus the minimal alignment shifted left 8 bits. */
11672 bc_runtime_type_code (type
)
11677 switch (TREE_CODE (type
))
11683 case ENUMERAL_TYPE
:
11687 val
= (int) TYPE_MODE (type
) | TYPE_ALIGN (type
) << 8;
11699 return build_int_2 (val
, 0);
11703 /* Generate constructor label */
11706 bc_gen_constr_label ()
11708 static int label_counter
;
11709 static char label
[20];
11711 sprintf (label
, "*LR%d", label_counter
++);
11713 return (obstack_copy0 (&permanent_obstack
, label
, strlen (label
)));
11717 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11718 expand the constructor data as static data, and push a pointer to it.
11719 The pointer is put in the pointer table and is retrieved by a constP
11720 bytecode instruction. We then loop and store each constructor member in
11721 the corresponding component. Finally, we return the original pointer on
11725 bc_expand_constructor (constr
)
11729 HOST_WIDE_INT ptroffs
;
11733 /* Literal constructors are handled as constants, whereas
11734 non-literals are evaluated and stored element by element
11735 into the data segment. */
11737 /* Allocate space in proper segment and push pointer to space on stack.
11740 l
= bc_gen_constr_label ();
11742 if (TREE_CONSTANT (constr
))
11746 bc_emit_const_labeldef (l
);
11747 bc_output_constructor (constr
, int_size_in_bytes (TREE_TYPE (constr
)));
11753 bc_emit_data_labeldef (l
);
11754 bc_output_data_constructor (constr
);
11758 /* Add reference to pointer table and recall pointer to stack;
11759 this code is common for both types of constructors: literals
11760 and non-literals. */
11762 ptroffs
= bc_define_pointer (l
);
11763 bc_emit_instruction (constP
, ptroffs
);
11765 /* This is all that has to be done if it's a literal. */
11766 if (TREE_CONSTANT (constr
))
11770 /* At this point, we have the pointer to the structure on top of the stack.
11771 Generate sequences of store_memory calls for the constructor. */
11773 /* constructor type is structure */
11774 if (TREE_CODE (TREE_TYPE (constr
)) == RECORD_TYPE
)
11778 /* If the constructor has fewer fields than the structure,
11779 clear the whole structure first. */
11781 if (list_length (CONSTRUCTOR_ELTS (constr
))
11782 != list_length (TYPE_FIELDS (TREE_TYPE (constr
))))
11784 bc_emit_instruction (duplicate
);
11785 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11786 bc_emit_instruction (clearBLK
);
11789 /* Store each element of the constructor into the corresponding
11790 field of TARGET. */
11792 for (elt
= CONSTRUCTOR_ELTS (constr
); elt
; elt
= TREE_CHAIN (elt
))
11794 register tree field
= TREE_PURPOSE (elt
);
11795 register enum machine_mode mode
;
11800 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
)) /* * DECL_SIZE_UNIT (field) */;
11801 mode
= DECL_MODE (field
);
11802 unsignedp
= TREE_UNSIGNED (field
);
11804 bitpos
= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field
));
11806 bc_store_field (elt
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11807 /* The alignment of TARGET is
11808 at least what its type requires. */
11810 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11811 int_size_in_bytes (TREE_TYPE (constr
)));
11816 /* Constructor type is array */
11817 if (TREE_CODE (TREE_TYPE (constr
)) == ARRAY_TYPE
)
11821 tree domain
= TYPE_DOMAIN (TREE_TYPE (constr
));
11822 int minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
11823 int maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
11824 tree elttype
= TREE_TYPE (TREE_TYPE (constr
));
11826 /* If the constructor has fewer fields than the structure,
11827 clear the whole structure first. */
11829 if (list_length (CONSTRUCTOR_ELTS (constr
)) < maxelt
- minelt
+ 1)
11831 bc_emit_instruction (duplicate
);
11832 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11833 bc_emit_instruction (clearBLK
);
11837 /* Store each element of the constructor into the corresponding
11838 element of TARGET, determined by counting the elements. */
11840 for (elt
= CONSTRUCTOR_ELTS (constr
), i
= 0;
11842 elt
= TREE_CHAIN (elt
), i
++)
11844 register enum machine_mode mode
;
11849 mode
= TYPE_MODE (elttype
);
11850 bitsize
= GET_MODE_BITSIZE (mode
);
11851 unsignedp
= TREE_UNSIGNED (elttype
);
11853 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
))
11854 /* * TYPE_SIZE_UNIT (elttype) */ );
11856 bc_store_field (elt
, bitsize
, bitpos
, mode
,
11857 TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11858 /* The alignment of TARGET is
11859 at least what its type requires. */
11861 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11862 int_size_in_bytes (TREE_TYPE (constr
)));
11869 /* Store the value of EXP (an expression tree) into member FIELD of
11870 structure at address on stack, which has type TYPE, mode MODE and
11871 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11874 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11875 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11878 bc_store_field (field
, bitsize
, bitpos
, mode
, exp
, type
,
11879 value_mode
, unsignedp
, align
, total_size
)
11880 int bitsize
, bitpos
;
11881 enum machine_mode mode
;
11882 tree field
, exp
, type
;
11883 enum machine_mode value_mode
;
11889 /* Expand expression and copy pointer */
11890 bc_expand_expr (exp
);
11891 bc_emit_instruction (over
);
11894 /* If the component is a bit field, we cannot use addressing to access
11895 it. Use bit-field techniques to store in it. */
11897 if (DECL_BIT_FIELD (field
))
11899 bc_store_bit_field (bitpos
, bitsize
, unsignedp
);
11903 /* Not bit field */
11905 HOST_WIDE_INT offset
= bitpos
/ BITS_PER_UNIT
;
11907 /* Advance pointer to the desired member */
11909 bc_emit_instruction (addconstPSI
, offset
);
11912 bc_store_memory (type
, field
);
11917 /* Store SI/SU in bitfield */
11920 bc_store_bit_field (offset
, size
, unsignedp
)
11921 int offset
, size
, unsignedp
;
11923 /* Push bitfield offset and size */
11924 bc_push_offset_and_size (offset
, size
);
11927 bc_emit_instruction (sstoreBI
);
11931 /* Load SI/SU from bitfield */
11934 bc_load_bit_field (offset
, size
, unsignedp
)
11935 int offset
, size
, unsignedp
;
11937 /* Push bitfield offset and size */
11938 bc_push_offset_and_size (offset
, size
);
11940 /* Load: sign-extend if signed, else zero-extend */
11941 bc_emit_instruction (unsignedp
? zxloadBI
: sxloadBI
);
11945 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11946 (adjust stack pointer upwards), negative means add that number of
11947 levels (adjust the stack pointer downwards). Only positive values
11948 normally make sense. */
11951 bc_adjust_stack (nlevels
)
11960 bc_emit_instruction (drop
);
11963 bc_emit_instruction (drop
);
11968 bc_emit_instruction (adjstackSI
, (HOST_WIDE_INT
) nlevels
);
11969 stack_depth
-= nlevels
;
11972 #if defined (VALIDATE_STACK_FOR_BC)
11973 VALIDATE_STACK_FOR_BC ();