1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls
= 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust
;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop
;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call
;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
105 int target_temp_slot_level
;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
110 static rtx saveregs_value
;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value
;
115 /* This structure is used by move_by_pieces to describe the move to
118 struct move_by_pieces
128 int explicit_inc_from
;
135 /* This structure is used by clear_by_pieces to describe the clear to
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size
;
155 extern int stack_depth
;
156 extern int max_stack_depth
;
157 extern struct obstack permanent_obstack
;
158 extern rtx arg_pointer_save_area
;
160 static rtx enqueue_insn
PROTO((rtx
, rtx
));
161 static int queued_subexp_p
PROTO((rtx
));
162 static void init_queue
PROTO((void));
163 static void move_by_pieces
PROTO((rtx
, rtx
, int, int));
164 static int move_by_pieces_ninsns
PROTO((unsigned int, int));
165 static void move_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
166 struct move_by_pieces
*));
167 static void clear_by_pieces
PROTO((rtx
, int, int));
168 static void clear_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
169 struct clear_by_pieces
*));
170 static int is_zeros_p
PROTO((tree
));
171 static int mostly_zeros_p
PROTO((tree
));
172 static void store_constructor
PROTO((tree
, rtx
, int));
173 static rtx store_field
PROTO((rtx
, int, int, enum machine_mode
, tree
,
174 enum machine_mode
, int, int, int));
175 static int get_inner_unaligned_p
PROTO((tree
));
176 static tree save_noncopied_parts
PROTO((tree
, tree
));
177 static tree init_noncopied_parts
PROTO((tree
, tree
));
178 static int safe_from_p
PROTO((rtx
, tree
));
179 static int fixed_type_p
PROTO((tree
));
180 static rtx var_rtx
PROTO((tree
));
181 static int get_pointer_alignment
PROTO((tree
, unsigned));
182 static tree string_constant
PROTO((tree
, tree
*));
183 static tree c_strlen
PROTO((tree
));
184 static rtx expand_builtin
PROTO((tree
, rtx
, rtx
,
185 enum machine_mode
, int));
186 static int apply_args_size
PROTO((void));
187 static int apply_result_size
PROTO((void));
188 static rtx result_vector
PROTO((int, rtx
));
189 static rtx expand_builtin_apply_args
PROTO((void));
190 static rtx expand_builtin_apply
PROTO((rtx
, rtx
, rtx
));
191 static void expand_builtin_return
PROTO((rtx
));
192 static rtx expand_increment
PROTO((tree
, int, int));
193 void bc_expand_increment
PROTO((struct increment_operator
*, tree
));
194 rtx bc_allocate_local
PROTO((int, int));
195 void bc_store_memory
PROTO((tree
, tree
));
196 tree bc_expand_component_address
PROTO((tree
));
197 tree bc_expand_address
PROTO((tree
));
198 void bc_expand_constructor
PROTO((tree
));
199 void bc_adjust_stack
PROTO((int));
200 tree bc_canonicalize_array_ref
PROTO((tree
));
201 void bc_load_memory
PROTO((tree
, tree
));
202 void bc_load_externaddr
PROTO((rtx
));
203 void bc_load_externaddr_id
PROTO((tree
, int));
204 void bc_load_localaddr
PROTO((rtx
));
205 void bc_load_parmaddr
PROTO((rtx
));
206 static void preexpand_calls
PROTO((tree
));
207 static void do_jump_by_parts_greater
PROTO((tree
, int, rtx
, rtx
));
208 void do_jump_by_parts_greater_rtx
PROTO((enum machine_mode
, int, rtx
, rtx
, rtx
, rtx
));
209 static void do_jump_by_parts_equality
PROTO((tree
, rtx
, rtx
));
210 static void do_jump_by_parts_equality_rtx
PROTO((rtx
, rtx
, rtx
));
211 static void do_jump_for_compare
PROTO((rtx
, rtx
, rtx
));
212 static rtx compare
PROTO((tree
, enum rtx_code
, enum rtx_code
));
213 static rtx do_store_flag
PROTO((tree
, rtx
, enum machine_mode
, int));
214 static tree defer_cleanups_to
PROTO((tree
));
215 extern tree truthvalue_conversion
PROTO((tree
));
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
221 static char direct_load
[NUM_MACHINE_MODES
];
222 static char direct_store
[NUM_MACHINE_MODES
];
224 /* MOVE_RATIO is the number of move instructions that is better than
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
231 /* A value of around 6 would minimize code size; infinity would minimize
233 #define MOVE_RATIO 15
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map
[MAX_MACHINE_MODE
];
259 enum bytecode_opcode mode_to_load_map
[MAX_MACHINE_MODE
];
260 enum bytecode_opcode mode_to_store_map
[MAX_MACHINE_MODE
];
262 /* Initialize maps used to convert modes to const, load, and store
266 bc_init_mode_to_opcode_maps ()
270 for (mode
= 0; mode
< (int) MAX_MACHINE_MODE
; mode
++)
271 mode_to_const_map
[mode
] =
272 mode_to_load_map
[mode
] =
273 mode_to_store_map
[mode
] = neverneverland
;
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
280 #include "modemap.def"
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
291 enum machine_mode mode
;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem
= gen_rtx (MEM
, VOIDmode
, stack_pointer_rtx
);
296 rtx mem1
= gen_rtx (MEM
, VOIDmode
, frame_pointer_rtx
);
299 insn
= emit_insn (gen_rtx (SET
, 0, 0));
300 pat
= PATTERN (insn
);
302 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
303 mode
= (enum machine_mode
) ((int) mode
+ 1))
309 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
310 PUT_MODE (mem
, mode
);
311 PUT_MODE (mem1
, mode
);
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
316 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
317 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
318 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
321 if (! HARD_REGNO_MODE_OK (regno
, mode
))
324 reg
= gen_rtx (REG
, mode
, regno
);
327 SET_DEST (pat
) = reg
;
328 if (recog (pat
, insn
, &num_clobbers
) >= 0)
329 direct_load
[(int) mode
] = 1;
331 SET_SRC (pat
) = mem1
;
332 SET_DEST (pat
) = reg
;
333 if (recog (pat
, insn
, &num_clobbers
) >= 0)
334 direct_load
[(int) mode
] = 1;
337 SET_DEST (pat
) = mem
;
338 if (recog (pat
, insn
, &num_clobbers
) >= 0)
339 direct_store
[(int) mode
] = 1;
342 SET_DEST (pat
) = mem1
;
343 if (recog (pat
, insn
, &num_clobbers
) >= 0)
344 direct_store
[(int) mode
] = 1;
351 /* This is run at the start of compiling a function. */
358 pending_stack_adjust
= 0;
359 inhibit_defer_pop
= 0;
360 cleanups_this_call
= 0;
362 apply_args_value
= 0;
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
373 /* Instead of saving the postincrement queue, empty it. */
376 p
->pending_stack_adjust
= pending_stack_adjust
;
377 p
->inhibit_defer_pop
= inhibit_defer_pop
;
378 p
->cleanups_this_call
= cleanups_this_call
;
379 p
->saveregs_value
= saveregs_value
;
380 p
->apply_args_value
= apply_args_value
;
381 p
->forced_labels
= forced_labels
;
383 pending_stack_adjust
= 0;
384 inhibit_defer_pop
= 0;
385 cleanups_this_call
= 0;
387 apply_args_value
= 0;
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
395 restore_expr_status (p
)
398 pending_stack_adjust
= p
->pending_stack_adjust
;
399 inhibit_defer_pop
= p
->inhibit_defer_pop
;
400 cleanups_this_call
= p
->cleanups_this_call
;
401 saveregs_value
= p
->saveregs_value
;
402 apply_args_value
= p
->apply_args_value
;
403 forced_labels
= p
->forced_labels
;
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
409 static rtx pending_chain
;
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
419 enqueue_insn (var
, body
)
422 pending_chain
= gen_rtx (QUEUED
, GET_MODE (var
),
423 var
, NULL_RTX
, NULL_RTX
, body
, pending_chain
);
424 return pending_chain
;
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
443 protect_from_queue (x
, modify
)
447 register RTX_CODE code
= GET_CODE (x
);
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain
== 0)
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
462 if (code
== MEM
&& GET_MODE (x
) != BLKmode
463 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
465 register rtx y
= XEXP (x
, 0);
466 register rtx
new = gen_rtx (MEM
, GET_MODE (x
), QUEUED_VAR (y
));
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
474 register rtx temp
= gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp
, new),
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
485 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
486 if (tem
!= XEXP (x
, 0))
492 else if (code
== PLUS
|| code
== MULT
)
494 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
495 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
496 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x
) == 0)
507 return QUEUED_VAR (x
);
508 /* If the increment has happened and a pre-increment copy exists,
510 if (QUEUED_COPY (x
) != 0)
511 return QUEUED_COPY (x
);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
517 return QUEUED_COPY (x
);
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
529 register enum rtx_code code
= GET_CODE (x
);
535 return queued_subexp_p (XEXP (x
, 0));
539 return queued_subexp_p (XEXP (x
, 0))
540 || queued_subexp_p (XEXP (x
, 1));
545 /* Perform all the pending incrementations. */
551 while (p
= pending_chain
)
553 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
554 pending_chain
= QUEUED_NEXT (p
);
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
571 convert_move (to
, from
, unsignedp
)
572 register rtx to
, from
;
575 enum machine_mode to_mode
= GET_MODE (to
);
576 enum machine_mode from_mode
= GET_MODE (from
);
577 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
578 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
585 to
= protect_from_queue (to
, 1);
586 from
= protect_from_queue (from
, 0);
588 if (to_real
!= from_real
)
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
595 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
597 >= GET_MODE_SIZE (to_mode
))
598 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
599 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
601 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
604 if (to_mode
== from_mode
605 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
607 emit_move_insn (to
, from
);
615 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
617 /* Try converting directly if the insn is supported. */
618 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
621 emit_unop_insn (code
, to
, from
, UNKNOWN
);
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
629 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
636 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
643 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
650 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
657 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
665 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
672 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
679 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
686 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
693 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
701 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
708 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
715 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
722 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
730 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
737 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
744 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
751 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
758 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
770 libcall
= extendsfdf2_libfunc
;
774 libcall
= extendsfxf2_libfunc
;
778 libcall
= extendsftf2_libfunc
;
787 libcall
= truncdfsf2_libfunc
;
791 libcall
= extenddfxf2_libfunc
;
795 libcall
= extenddftf2_libfunc
;
804 libcall
= truncxfsf2_libfunc
;
808 libcall
= truncxfdf2_libfunc
;
817 libcall
= trunctfsf2_libfunc
;
821 libcall
= trunctfdf2_libfunc
;
827 if (libcall
== (rtx
) 0)
828 /* This conversion is not implemented yet. */
831 value
= emit_library_call_value (libcall
, NULL_RTX
, 1, to_mode
,
833 emit_move_insn (to
, value
);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
841 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
848 enum machine_mode lowpart_mode
;
849 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
851 /* Try converting directly if the insn is supported. */
852 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
860 from
= force_reg (from_mode
, from
);
861 emit_unop_insn (code
, to
, from
, equiv_code
);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
866 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
867 != CODE_FOR_nothing
))
869 if (GET_CODE (to
) == REG
)
870 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, to
));
871 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
872 emit_unop_insn (code
, to
,
873 gen_lowpart (word_mode
, to
), equiv_code
);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to
, from
))
884 from
= force_reg (from_mode
, from
);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
888 lowpart_mode
= word_mode
;
890 lowpart_mode
= from_mode
;
892 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
894 lowpart
= gen_lowpart (lowpart_mode
, to
);
895 emit_move_insn (lowpart
, lowfrom
);
897 /* Compute the value to put in each remaining word. */
899 fill_value
= const0_rtx
;
904 && insn_operand_mode
[(int) CODE_FOR_slt
][0] == word_mode
905 && STORE_FLAG_VALUE
== -1)
907 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
909 fill_value
= gen_reg_rtx (word_mode
);
910 emit_insn (gen_slt (fill_value
));
916 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
917 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
919 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
923 /* Fill the remaining words. */
924 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
926 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
927 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
932 if (fill_value
!= subword
)
933 emit_move_insn (subword
, fill_value
);
936 insns
= get_insns ();
939 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
940 gen_rtx (equiv_code
, to_mode
, copy_rtx (from
)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
948 if (!((GET_CODE (from
) == MEM
949 && ! MEM_VOLATILE_P (from
)
950 && direct_load
[(int) to_mode
]
951 && ! mode_dependent_address_p (XEXP (from
, 0)))
952 || GET_CODE (from
) == REG
953 || GET_CODE (from
) == SUBREG
))
954 from
= force_reg (from_mode
, from
);
955 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode
== PSImode
)
962 if (from_mode
!= SImode
)
963 from
= convert_to_mode (SImode
, from
, unsignedp
);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2
)
968 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode
== PSImode
)
977 if (to_mode
!= SImode
)
979 from
= convert_to_mode (SImode
, from
, unsignedp
);
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2
)
987 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
990 #endif /* HAVE_extendpsisi2 */
995 if (to_mode
== PDImode
)
997 if (from_mode
!= DImode
)
998 from
= convert_to_mode (DImode
, from
, unsignedp
);
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2
)
1003 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1006 #endif /* HAVE_truncdipdi2 */
1010 if (from_mode
== PDImode
)
1012 if (to_mode
!= DImode
)
1014 from
= convert_to_mode (DImode
, from
, unsignedp
);
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2
)
1022 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1025 #endif /* HAVE_extendpdidi2 */
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1036 GET_MODE_BITSIZE (from_mode
)))
1038 if (!((GET_CODE (from
) == MEM
1039 && ! MEM_VOLATILE_P (from
)
1040 && direct_load
[(int) to_mode
]
1041 && ! mode_dependent_address_p (XEXP (from
, 0)))
1042 || GET_CODE (from
) == REG
1043 || GET_CODE (from
) == SUBREG
))
1044 from
= force_reg (from_mode
, from
);
1045 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1047 from
= copy_to_reg (from
);
1048 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1055 /* Convert directly if that works. */
1056 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1057 != CODE_FOR_nothing
)
1059 emit_unop_insn (code
, to
, from
, equiv_code
);
1064 enum machine_mode intermediate
;
1066 /* Search for a mode to convert via. */
1067 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1068 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1069 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1070 != CODE_FOR_nothing
)
1071 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1072 && TRULY_NOOP_TRUNCATION (to_mode
, intermediate
)))
1073 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1074 != CODE_FOR_nothing
))
1076 convert_move (to
, convert_to_mode (intermediate
, from
,
1077 unsignedp
), unsignedp
);
1081 /* No suitable intermediate mode. */
1086 /* Support special truncate insns for certain modes. */
1088 if (from_mode
== DImode
&& to_mode
== SImode
)
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2
)
1093 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1097 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1101 if (from_mode
== DImode
&& to_mode
== HImode
)
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2
)
1106 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1110 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1114 if (from_mode
== DImode
&& to_mode
== QImode
)
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2
)
1119 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1123 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1127 if (from_mode
== SImode
&& to_mode
== HImode
)
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2
)
1132 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1136 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1140 if (from_mode
== SImode
&& to_mode
== QImode
)
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2
)
1145 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1149 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1153 if (from_mode
== HImode
&& to_mode
== QImode
)
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2
)
1158 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1162 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1166 if (from_mode
== TImode
&& to_mode
== DImode
)
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2
)
1171 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1175 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1179 if (from_mode
== TImode
&& to_mode
== SImode
)
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2
)
1184 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1188 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1192 if (from_mode
== TImode
&& to_mode
== HImode
)
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2
)
1197 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1201 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1205 if (from_mode
== TImode
&& to_mode
== QImode
)
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2
)
1210 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1214 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1223 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1224 emit_move_insn (to
, temp
);
1228 /* Mode combination is not recognized. */
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1243 convert_to_mode (mode
, x
, unsignedp
)
1244 enum machine_mode mode
;
1248 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1265 convert_modes (mode
, oldmode
, x
, unsignedp
)
1266 enum machine_mode mode
, oldmode
;
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1275 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1278 x
= gen_lowpart (mode
, x
);
1280 if (GET_MODE (x
) != VOIDmode
)
1281 oldmode
= GET_MODE (x
);
1283 if (mode
== oldmode
)
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1292 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1293 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1295 return immed_double_const (INTVAL (x
), (HOST_WIDE_INT
) 0, mode
);
1297 /* We can do this with a gen_lowpart if both desired and current modes
1298 are integer, and this is either a constant integer, a register, or a
1299 non-volatile MEM. Except for the constant case where MODE is no
1300 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1302 if ((GET_CODE (x
) == CONST_INT
1303 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1304 || (GET_MODE_CLASS (mode
) == MODE_INT
1305 && GET_MODE_CLASS (oldmode
) == MODE_INT
1306 && (GET_CODE (x
) == CONST_DOUBLE
1307 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1308 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1309 && direct_load
[(int) mode
])
1310 || (GET_CODE (x
) == REG
1311 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1312 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1314 /* ?? If we don't know OLDMODE, we have to assume here that
1315 X does not need sign- or zero-extension. This may not be
1316 the case, but it's the best we can do. */
1317 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1318 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1320 HOST_WIDE_INT val
= INTVAL (x
);
1321 int width
= GET_MODE_BITSIZE (oldmode
);
1323 /* We must sign or zero-extend in this case. Start by
1324 zero-extending, then sign extend if we need to. */
1325 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1327 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1328 val
|= (HOST_WIDE_INT
) (-1) << width
;
1330 return GEN_INT (val
);
1333 return gen_lowpart (mode
, x
);
1336 temp
= gen_reg_rtx (mode
);
1337 convert_move (temp
, x
, unsignedp
);
1341 /* Generate several move instructions to copy LEN bytes
1342 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1343 The caller must pass FROM and TO
1344 through protect_from_queue before calling.
1345 ALIGN (in bytes) is maximum alignment we can assume. */
1348 move_by_pieces (to
, from
, len
, align
)
1352 struct move_by_pieces data
;
1353 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1354 int max_size
= MOVE_MAX
+ 1;
1357 data
.to_addr
= to_addr
;
1358 data
.from_addr
= from_addr
;
1362 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1363 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1365 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1366 || GET_CODE (from_addr
) == POST_INC
1367 || GET_CODE (from_addr
) == POST_DEC
);
1369 data
.explicit_inc_from
= 0;
1370 data
.explicit_inc_to
= 0;
1372 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1373 if (data
.reverse
) data
.offset
= len
;
1376 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1377 data
.from_struct
= MEM_IN_STRUCT_P (from
);
1379 /* If copying requires more than two move insns,
1380 copy addresses to registers (to make displacements shorter)
1381 and use post-increment if available. */
1382 if (!(data
.autinc_from
&& data
.autinc_to
)
1383 && move_by_pieces_ninsns (len
, align
) > 2)
1385 #ifdef HAVE_PRE_DECREMENT
1386 if (data
.reverse
&& ! data
.autinc_from
)
1388 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1389 data
.autinc_from
= 1;
1390 data
.explicit_inc_from
= -1;
1393 #ifdef HAVE_POST_INCREMENT
1394 if (! data
.autinc_from
)
1396 data
.from_addr
= copy_addr_to_reg (from_addr
);
1397 data
.autinc_from
= 1;
1398 data
.explicit_inc_from
= 1;
1401 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1402 data
.from_addr
= copy_addr_to_reg (from_addr
);
1403 #ifdef HAVE_PRE_DECREMENT
1404 if (data
.reverse
&& ! data
.autinc_to
)
1406 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1408 data
.explicit_inc_to
= -1;
1411 #ifdef HAVE_POST_INCREMENT
1412 if (! data
.reverse
&& ! data
.autinc_to
)
1414 data
.to_addr
= copy_addr_to_reg (to_addr
);
1416 data
.explicit_inc_to
= 1;
1419 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1420 data
.to_addr
= copy_addr_to_reg (to_addr
);
1423 if (! SLOW_UNALIGNED_ACCESS
1424 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1427 /* First move what we can in the largest integer mode, then go to
1428 successively smaller modes. */
1430 while (max_size
> 1)
1432 enum machine_mode mode
= VOIDmode
, tmode
;
1433 enum insn_code icode
;
1435 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1436 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1437 if (GET_MODE_SIZE (tmode
) < max_size
)
1440 if (mode
== VOIDmode
)
1443 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1444 if (icode
!= CODE_FOR_nothing
1445 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1446 GET_MODE_SIZE (mode
)))
1447 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1449 max_size
= GET_MODE_SIZE (mode
);
1452 /* The code above should have handled everything. */
1457 /* Return number of insns required to move L bytes by pieces.
1458 ALIGN (in bytes) is maximum alignment we can assume. */
1461 move_by_pieces_ninsns (l
, align
)
1465 register int n_insns
= 0;
1466 int max_size
= MOVE_MAX
+ 1;
1468 if (! SLOW_UNALIGNED_ACCESS
1469 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1472 while (max_size
> 1)
1474 enum machine_mode mode
= VOIDmode
, tmode
;
1475 enum insn_code icode
;
1477 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1478 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1479 if (GET_MODE_SIZE (tmode
) < max_size
)
1482 if (mode
== VOIDmode
)
1485 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1486 if (icode
!= CODE_FOR_nothing
1487 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1488 GET_MODE_SIZE (mode
)))
1489 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1491 max_size
= GET_MODE_SIZE (mode
);
1497 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1498 with move instructions for mode MODE. GENFUN is the gen_... function
1499 to make a move insn for that mode. DATA has all the other info. */
1502 move_by_pieces_1 (genfun
, mode
, data
)
1504 enum machine_mode mode
;
1505 struct move_by_pieces
*data
;
1507 register int size
= GET_MODE_SIZE (mode
);
1508 register rtx to1
, from1
;
1510 while (data
->len
>= size
)
1512 if (data
->reverse
) data
->offset
-= size
;
1514 to1
= (data
->autinc_to
1515 ? gen_rtx (MEM
, mode
, data
->to_addr
)
1516 : change_address (data
->to
, mode
,
1517 plus_constant (data
->to_addr
, data
->offset
)));
1518 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
1521 ? gen_rtx (MEM
, mode
, data
->from_addr
)
1522 : change_address (data
->from
, mode
,
1523 plus_constant (data
->from_addr
, data
->offset
)));
1524 MEM_IN_STRUCT_P (from1
) = data
->from_struct
;
1526 #ifdef HAVE_PRE_DECREMENT
1527 if (data
->explicit_inc_to
< 0)
1528 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1529 if (data
->explicit_inc_from
< 0)
1530 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1533 emit_insn ((*genfun
) (to1
, from1
));
1534 #ifdef HAVE_POST_INCREMENT
1535 if (data
->explicit_inc_to
> 0)
1536 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1537 if (data
->explicit_inc_from
> 0)
1538 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1541 if (! data
->reverse
) data
->offset
+= size
;
1547 /* Emit code to move a block Y to a block X.
1548 This may be done with string-move instructions,
1549 with multiple scalar move instructions, or with a library call.
1551 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1553 SIZE is an rtx that says how long they are.
1554 ALIGN is the maximum alignment we can assume they have,
1555 measured in bytes. */
1558 emit_block_move (x
, y
, size
, align
)
1563 if (GET_MODE (x
) != BLKmode
)
1566 if (GET_MODE (y
) != BLKmode
)
1569 x
= protect_from_queue (x
, 1);
1570 y
= protect_from_queue (y
, 0);
1571 size
= protect_from_queue (size
, 0);
1573 if (GET_CODE (x
) != MEM
)
1575 if (GET_CODE (y
) != MEM
)
1580 if (GET_CODE (size
) == CONST_INT
1581 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
1582 move_by_pieces (x
, y
, INTVAL (size
), align
);
1585 /* Try the most limited insn first, because there's no point
1586 including more than one in the machine description unless
1587 the more limited one has some advantage. */
1589 rtx opalign
= GEN_INT (align
);
1590 enum machine_mode mode
;
1592 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1593 mode
= GET_MODE_WIDER_MODE (mode
))
1595 enum insn_code code
= movstr_optab
[(int) mode
];
1597 if (code
!= CODE_FOR_nothing
1598 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1599 here because if SIZE is less than the mode mask, as it is
1600 returned by the macro, it will definitely be less than the
1601 actual mode mask. */
1602 && ((GET_CODE (size
) == CONST_INT
1603 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1604 <= GET_MODE_MASK (mode
)))
1605 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1606 && (insn_operand_predicate
[(int) code
][0] == 0
1607 || (*insn_operand_predicate
[(int) code
][0]) (x
, BLKmode
))
1608 && (insn_operand_predicate
[(int) code
][1] == 0
1609 || (*insn_operand_predicate
[(int) code
][1]) (y
, BLKmode
))
1610 && (insn_operand_predicate
[(int) code
][3] == 0
1611 || (*insn_operand_predicate
[(int) code
][3]) (opalign
,
1615 rtx last
= get_last_insn ();
1618 op2
= convert_to_mode (mode
, size
, 1);
1619 if (insn_operand_predicate
[(int) code
][2] != 0
1620 && ! (*insn_operand_predicate
[(int) code
][2]) (op2
, mode
))
1621 op2
= copy_to_mode_reg (mode
, op2
);
1623 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1630 delete_insns_since (last
);
1634 #ifdef TARGET_MEM_FUNCTIONS
1635 emit_library_call (memcpy_libfunc
, 0,
1636 VOIDmode
, 3, XEXP (x
, 0), Pmode
,
1638 convert_to_mode (TYPE_MODE (sizetype
), size
,
1639 TREE_UNSIGNED (sizetype
)),
1640 TYPE_MODE (sizetype
));
1642 emit_library_call (bcopy_libfunc
, 0,
1643 VOIDmode
, 3, XEXP (y
, 0), Pmode
,
1645 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1646 TREE_UNSIGNED (integer_type_node
)),
1647 TYPE_MODE (integer_type_node
));
1652 /* Copy all or part of a value X into registers starting at REGNO.
1653 The number of registers to be filled is NREGS. */
1656 move_block_to_reg (regno
, x
, nregs
, mode
)
1660 enum machine_mode mode
;
1668 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1669 x
= validize_mem (force_const_mem (mode
, x
));
1671 /* See if the machine can do this with a load multiple insn. */
1672 #ifdef HAVE_load_multiple
1673 if (HAVE_load_multiple
)
1675 last
= get_last_insn ();
1676 pat
= gen_load_multiple (gen_rtx (REG
, word_mode
, regno
), x
,
1684 delete_insns_since (last
);
1688 for (i
= 0; i
< nregs
; i
++)
1689 emit_move_insn (gen_rtx (REG
, word_mode
, regno
+ i
),
1690 operand_subword_force (x
, i
, mode
));
1693 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1694 The number of registers to be filled is NREGS. SIZE indicates the number
1695 of bytes in the object X. */
1699 move_block_from_reg (regno
, x
, nregs
, size
)
1708 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1709 to the left before storing to memory. */
1710 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1712 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1718 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1719 gen_rtx (REG
, word_mode
, regno
),
1720 build_int_2 ((UNITS_PER_WORD
- size
)
1721 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1722 emit_move_insn (tem
, shift
);
1726 /* See if the machine can do this with a store multiple insn. */
1727 #ifdef HAVE_store_multiple
1728 if (HAVE_store_multiple
)
1730 last
= get_last_insn ();
1731 pat
= gen_store_multiple (x
, gen_rtx (REG
, word_mode
, regno
),
1739 delete_insns_since (last
);
1743 for (i
= 0; i
< nregs
; i
++)
1745 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1750 emit_move_insn (tem
, gen_rtx (REG
, word_mode
, regno
+ i
));
1754 /* Emit code to move a block Y to a block X, where X is non-consecutive
1755 registers represented by a PARALLEL. */
1758 emit_group_load (x
, y
)
1761 rtx target_reg
, source
;
1764 if (GET_CODE (x
) != PARALLEL
)
1767 /* Check for a NULL entry, used to indicate that the parameter goes
1768 both on the stack and in registers. */
1769 if (XEXP (XVECEXP (x
, 0, 0), 0))
1774 for (; i
< XVECLEN (x
, 0); i
++)
1776 rtx element
= XVECEXP (x
, 0, i
);
1778 target_reg
= XEXP (element
, 0);
1780 if (GET_CODE (y
) == MEM
)
1781 source
= change_address (y
, GET_MODE (target_reg
),
1782 plus_constant (XEXP (y
, 0),
1783 INTVAL (XEXP (element
, 1))));
1784 else if (XEXP (element
, 1) == const0_rtx
)
1786 if (GET_MODE (target_reg
) == GET_MODE (y
))
1788 /* Allow for the target_reg to be smaller than the input register
1789 to allow for AIX with 4 DF arguments after a single SI arg. The
1790 last DF argument will only load 1 word into the integer registers,
1791 but load a DF value into the float registers. */
1792 else if (GET_MODE_SIZE (GET_MODE (target_reg
))
1793 <= GET_MODE_SIZE (GET_MODE (y
)))
1794 source
= gen_rtx (SUBREG
, GET_MODE (target_reg
), y
, 0);
1801 emit_move_insn (target_reg
, source
);
1805 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1806 registers represented by a PARALLEL. */
1809 emit_group_store (x
, y
)
1812 rtx source_reg
, target
;
1815 if (GET_CODE (y
) != PARALLEL
)
1818 /* Check for a NULL entry, used to indicate that the parameter goes
1819 both on the stack and in registers. */
1820 if (XEXP (XVECEXP (y
, 0, 0), 0))
1825 for (; i
< XVECLEN (y
, 0); i
++)
1827 rtx element
= XVECEXP (y
, 0, i
);
1829 source_reg
= XEXP (element
, 0);
1831 if (GET_CODE (x
) == MEM
)
1832 target
= change_address (x
, GET_MODE (source_reg
),
1833 plus_constant (XEXP (x
, 0),
1834 INTVAL (XEXP (element
, 1))));
1835 else if (XEXP (element
, 1) == const0_rtx
)
1840 emit_move_insn (target
, source_reg
);
1844 /* Add a USE expression for REG to the (possibly empty) list pointed
1845 to by CALL_FUSAGE. REG must denote a hard register. */
1848 use_reg (call_fusage
, reg
)
1849 rtx
*call_fusage
, reg
;
1851 if (GET_CODE (reg
) != REG
1852 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
1856 = gen_rtx (EXPR_LIST
, VOIDmode
,
1857 gen_rtx (USE
, VOIDmode
, reg
), *call_fusage
);
1860 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1861 starting at REGNO. All of these registers must be hard registers. */
1864 use_regs (call_fusage
, regno
, nregs
)
1871 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
1874 for (i
= 0; i
< nregs
; i
++)
1875 use_reg (call_fusage
, gen_rtx (REG
, reg_raw_mode
[regno
+ i
], regno
+ i
));
1878 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1879 PARALLEL REGS. This is for calls that pass values in multiple
1880 non-contiguous locations. The Irix 6 ABI has examples of this. */
1883 use_group_regs (call_fusage
, regs
)
1889 /* Check for a NULL entry, used to indicate that the parameter goes
1890 both on the stack and in registers. */
1891 if (XEXP (XVECEXP (regs
, 0, 0), 0))
1896 for (; i
< XVECLEN (regs
, 0); i
++)
1897 use_reg (call_fusage
, XEXP (XVECEXP (regs
, 0, i
), 0));
1900 /* Generate several move instructions to clear LEN bytes of block TO.
1901 (A MEM rtx with BLKmode). The caller must pass TO through
1902 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1906 clear_by_pieces (to
, len
, align
)
1910 struct clear_by_pieces data
;
1911 rtx to_addr
= XEXP (to
, 0);
1912 int max_size
= MOVE_MAX
+ 1;
1915 data
.to_addr
= to_addr
;
1918 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1919 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1921 data
.explicit_inc_to
= 0;
1923 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1924 if (data
.reverse
) data
.offset
= len
;
1927 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1929 /* If copying requires more than two move insns,
1930 copy addresses to registers (to make displacements shorter)
1931 and use post-increment if available. */
1933 && move_by_pieces_ninsns (len
, align
) > 2)
1935 #ifdef HAVE_PRE_DECREMENT
1936 if (data
.reverse
&& ! data
.autinc_to
)
1938 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1940 data
.explicit_inc_to
= -1;
1943 #ifdef HAVE_POST_INCREMENT
1944 if (! data
.reverse
&& ! data
.autinc_to
)
1946 data
.to_addr
= copy_addr_to_reg (to_addr
);
1948 data
.explicit_inc_to
= 1;
1951 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1952 data
.to_addr
= copy_addr_to_reg (to_addr
);
1955 if (! SLOW_UNALIGNED_ACCESS
1956 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1959 /* First move what we can in the largest integer mode, then go to
1960 successively smaller modes. */
1962 while (max_size
> 1)
1964 enum machine_mode mode
= VOIDmode
, tmode
;
1965 enum insn_code icode
;
1967 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1968 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1969 if (GET_MODE_SIZE (tmode
) < max_size
)
1972 if (mode
== VOIDmode
)
1975 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1976 if (icode
!= CODE_FOR_nothing
1977 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1978 GET_MODE_SIZE (mode
)))
1979 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1981 max_size
= GET_MODE_SIZE (mode
);
1984 /* The code above should have handled everything. */
1989 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1990 with move instructions for mode MODE. GENFUN is the gen_... function
1991 to make a move insn for that mode. DATA has all the other info. */
1994 clear_by_pieces_1 (genfun
, mode
, data
)
1996 enum machine_mode mode
;
1997 struct clear_by_pieces
*data
;
1999 register int size
= GET_MODE_SIZE (mode
);
2002 while (data
->len
>= size
)
2004 if (data
->reverse
) data
->offset
-= size
;
2006 to1
= (data
->autinc_to
2007 ? gen_rtx (MEM
, mode
, data
->to_addr
)
2008 : change_address (data
->to
, mode
,
2009 plus_constant (data
->to_addr
, data
->offset
)));
2010 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
2012 #ifdef HAVE_PRE_DECREMENT
2013 if (data
->explicit_inc_to
< 0)
2014 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2017 emit_insn ((*genfun
) (to1
, const0_rtx
));
2018 #ifdef HAVE_POST_INCREMENT
2019 if (data
->explicit_inc_to
> 0)
2020 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2023 if (! data
->reverse
) data
->offset
+= size
;
2029 /* Write zeros through the storage of OBJECT.
2030 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2031 the maximum alignment we can is has, measured in bytes. */
2034 clear_storage (object
, size
, align
)
2039 if (GET_MODE (object
) == BLKmode
)
2041 object
= protect_from_queue (object
, 1);
2042 size
= protect_from_queue (size
, 0);
2044 if (GET_CODE (size
) == CONST_INT
2045 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
2046 clear_by_pieces (object
, INTVAL (size
), align
);
2050 /* Try the most limited insn first, because there's no point
2051 including more than one in the machine description unless
2052 the more limited one has some advantage. */
2054 rtx opalign
= GEN_INT (align
);
2055 enum machine_mode mode
;
2057 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2058 mode
= GET_MODE_WIDER_MODE (mode
))
2060 enum insn_code code
= clrstr_optab
[(int) mode
];
2062 if (code
!= CODE_FOR_nothing
2063 /* We don't need MODE to be narrower than
2064 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2065 the mode mask, as it is returned by the macro, it will
2066 definitely be less than the actual mode mask. */
2067 && ((GET_CODE (size
) == CONST_INT
2068 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2069 <= GET_MODE_MASK (mode
)))
2070 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2071 && (insn_operand_predicate
[(int) code
][0] == 0
2072 || (*insn_operand_predicate
[(int) code
][0]) (object
,
2074 && (insn_operand_predicate
[(int) code
][2] == 0
2075 || (*insn_operand_predicate
[(int) code
][2]) (opalign
,
2079 rtx last
= get_last_insn ();
2082 op1
= convert_to_mode (mode
, size
, 1);
2083 if (insn_operand_predicate
[(int) code
][1] != 0
2084 && ! (*insn_operand_predicate
[(int) code
][1]) (op1
,
2086 op1
= copy_to_mode_reg (mode
, op1
);
2088 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2095 delete_insns_since (last
);
2100 #ifdef TARGET_MEM_FUNCTIONS
2101 emit_library_call (memset_libfunc
, 0,
2103 XEXP (object
, 0), Pmode
,
2104 const0_rtx
, TYPE_MODE (integer_type_node
),
2105 convert_to_mode (TYPE_MODE (sizetype
),
2106 size
, TREE_UNSIGNED (sizetype
)),
2107 TYPE_MODE (sizetype
));
2109 emit_library_call (bzero_libfunc
, 0,
2111 XEXP (object
, 0), Pmode
,
2112 convert_to_mode (TYPE_MODE (integer_type_node
),
2114 TREE_UNSIGNED (integer_type_node
)),
2115 TYPE_MODE (integer_type_node
));
2120 emit_move_insn (object
, const0_rtx
);
2123 /* Generate code to copy Y into X.
2124 Both Y and X must have the same mode, except that
2125 Y can be a constant with VOIDmode.
2126 This mode cannot be BLKmode; use emit_block_move for that.
2128 Return the last instruction emitted. */
2131 emit_move_insn (x
, y
)
2134 enum machine_mode mode
= GET_MODE (x
);
2136 x
= protect_from_queue (x
, 1);
2137 y
= protect_from_queue (y
, 0);
2139 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2142 if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2143 y
= force_const_mem (mode
, y
);
2145 /* If X or Y are memory references, verify that their addresses are valid
2147 if (GET_CODE (x
) == MEM
2148 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2149 && ! push_operand (x
, GET_MODE (x
)))
2151 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2152 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2154 if (GET_CODE (y
) == MEM
2155 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2157 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2158 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2160 if (mode
== BLKmode
)
2163 return emit_move_insn_1 (x
, y
);
2166 /* Low level part of emit_move_insn.
2167 Called just like emit_move_insn, but assumes X and Y
2168 are basically valid. */
2171 emit_move_insn_1 (x
, y
)
2174 enum machine_mode mode
= GET_MODE (x
);
2175 enum machine_mode submode
;
2176 enum mode_class
class = GET_MODE_CLASS (mode
);
2179 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2181 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2183 /* Expand complex moves by moving real part and imag part, if possible. */
2184 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2185 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2187 (class == MODE_COMPLEX_INT
2188 ? MODE_INT
: MODE_FLOAT
),
2190 && (mov_optab
->handlers
[(int) submode
].insn_code
2191 != CODE_FOR_nothing
))
2193 /* Don't split destination if it is a stack push. */
2194 int stack
= push_operand (x
, GET_MODE (x
));
2197 /* If this is a stack, push the highpart first, so it
2198 will be in the argument order.
2200 In that case, change_address is used only to convert
2201 the mode, not to change the address. */
2204 /* Note that the real part always precedes the imag part in memory
2205 regardless of machine's endianness. */
2206 #ifdef STACK_GROWS_DOWNWARD
2207 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2208 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2209 gen_imagpart (submode
, y
)));
2210 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2211 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2212 gen_realpart (submode
, y
)));
2214 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2215 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2216 gen_realpart (submode
, y
)));
2217 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2218 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2219 gen_imagpart (submode
, y
)));
2224 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2225 (gen_realpart (submode
, x
), gen_realpart (submode
, y
)));
2226 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2227 (gen_imagpart (submode
, x
), gen_imagpart (submode
, y
)));
2230 return get_last_insn ();
2233 /* This will handle any multi-word mode that lacks a move_insn pattern.
2234 However, you will get better code if you define such patterns,
2235 even if they must turn into multiple assembler instructions. */
2236 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2241 #ifdef PUSH_ROUNDING
2243 /* If X is a push on the stack, do the push now and replace
2244 X with a reference to the stack pointer. */
2245 if (push_operand (x
, GET_MODE (x
)))
2247 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2248 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2252 /* Show the output dies here. */
2254 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, x
));
2257 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2260 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2261 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2263 /* If we can't get a part of Y, put Y into memory if it is a
2264 constant. Otherwise, force it into a register. If we still
2265 can't get a part of Y, abort. */
2266 if (ypart
== 0 && CONSTANT_P (y
))
2268 y
= force_const_mem (mode
, y
);
2269 ypart
= operand_subword (y
, i
, 1, mode
);
2271 else if (ypart
== 0)
2272 ypart
= operand_subword_force (y
, i
, mode
);
2274 if (xpart
== 0 || ypart
== 0)
2277 last_insn
= emit_move_insn (xpart
, ypart
);
2286 /* Pushing data onto the stack. */
2288 /* Push a block of length SIZE (perhaps variable)
2289 and return an rtx to address the beginning of the block.
2290 Note that it is not possible for the value returned to be a QUEUED.
2291 The value may be virtual_outgoing_args_rtx.
2293 EXTRA is the number of bytes of padding to push in addition to SIZE.
2294 BELOW nonzero means this padding comes at low addresses;
2295 otherwise, the padding comes at high addresses. */
2298 push_block (size
, extra
, below
)
2304 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2305 if (CONSTANT_P (size
))
2306 anti_adjust_stack (plus_constant (size
, extra
));
2307 else if (GET_CODE (size
) == REG
&& extra
== 0)
2308 anti_adjust_stack (size
);
2311 rtx temp
= copy_to_mode_reg (Pmode
, size
);
2313 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2314 temp
, 0, OPTAB_LIB_WIDEN
);
2315 anti_adjust_stack (temp
);
2318 #ifdef STACK_GROWS_DOWNWARD
2319 temp
= virtual_outgoing_args_rtx
;
2320 if (extra
!= 0 && below
)
2321 temp
= plus_constant (temp
, extra
);
2323 if (GET_CODE (size
) == CONST_INT
)
2324 temp
= plus_constant (virtual_outgoing_args_rtx
,
2325 - INTVAL (size
) - (below
? 0 : extra
));
2326 else if (extra
!= 0 && !below
)
2327 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2328 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2330 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2331 negate_rtx (Pmode
, size
));
2334 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
2340 return gen_rtx (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
2343 /* Generate code to push X onto the stack, assuming it has mode MODE and
2345 MODE is redundant except when X is a CONST_INT (since they don't
2347 SIZE is an rtx for the size of data to be copied (in bytes),
2348 needed only if X is BLKmode.
2350 ALIGN (in bytes) is maximum alignment we can assume.
2352 If PARTIAL and REG are both nonzero, then copy that many of the first
2353 words of X into registers starting with REG, and push the rest of X.
2354 The amount of space pushed is decreased by PARTIAL words,
2355 rounded *down* to a multiple of PARM_BOUNDARY.
2356 REG must be a hard register in this case.
2357 If REG is zero but PARTIAL is not, take any all others actions for an
2358 argument partially in registers, but do not actually load any
2361 EXTRA is the amount in bytes of extra space to leave next to this arg.
2362 This is ignored if an argument block has already been allocated.
2364 On a machine that lacks real push insns, ARGS_ADDR is the address of
2365 the bottom of the argument block for this call. We use indexing off there
2366 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2367 argument block has not been preallocated.
2369 ARGS_SO_FAR is the size of args previously pushed for this call. */
2372 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2373 args_addr
, args_so_far
)
2375 enum machine_mode mode
;
2386 enum direction stack_direction
2387 #ifdef STACK_GROWS_DOWNWARD
2393 /* Decide where to pad the argument: `downward' for below,
2394 `upward' for above, or `none' for don't pad it.
2395 Default is below for small data on big-endian machines; else above. */
2396 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2398 /* If we're placing part of X into a register and part of X onto
2399 the stack, indicate that the entire register is clobbered to
2400 keep flow from thinking the unused part of the register is live. */
2401 if (partial
> 0 && reg
!= 0)
2402 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, reg
));
2404 /* Invert direction if stack is post-update. */
2405 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2406 if (where_pad
!= none
)
2407 where_pad
= (where_pad
== downward
? upward
: downward
);
2409 xinner
= x
= protect_from_queue (x
, 0);
2411 if (mode
== BLKmode
)
2413 /* Copy a block into the stack, entirely or partially. */
2416 int used
= partial
* UNITS_PER_WORD
;
2417 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2425 /* USED is now the # of bytes we need not copy to the stack
2426 because registers will take care of them. */
2429 xinner
= change_address (xinner
, BLKmode
,
2430 plus_constant (XEXP (xinner
, 0), used
));
2432 /* If the partial register-part of the arg counts in its stack size,
2433 skip the part of stack space corresponding to the registers.
2434 Otherwise, start copying to the beginning of the stack space,
2435 by setting SKIP to 0. */
2436 #ifndef REG_PARM_STACK_SPACE
2442 #ifdef PUSH_ROUNDING
2443 /* Do it with several push insns if that doesn't take lots of insns
2444 and if there is no difficulty with push insns that skip bytes
2445 on the stack for alignment purposes. */
2447 && GET_CODE (size
) == CONST_INT
2449 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
) - used
, align
)
2451 /* Here we avoid the case of a structure whose weak alignment
2452 forces many pushes of a small amount of data,
2453 and such small pushes do rounding that causes trouble. */
2454 && ((! SLOW_UNALIGNED_ACCESS
)
2455 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
2456 || PUSH_ROUNDING (align
) == align
)
2457 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
2459 /* Push padding now if padding above and stack grows down,
2460 or if padding below and stack grows up.
2461 But if space already allocated, this has already been done. */
2462 if (extra
&& args_addr
== 0
2463 && where_pad
!= none
&& where_pad
!= stack_direction
)
2464 anti_adjust_stack (GEN_INT (extra
));
2466 move_by_pieces (gen_rtx (MEM
, BLKmode
, gen_push_operand ()), xinner
,
2467 INTVAL (size
) - used
, align
);
2470 #endif /* PUSH_ROUNDING */
2472 /* Otherwise make space on the stack and copy the data
2473 to the address of that space. */
2475 /* Deduct words put into registers from the size we must copy. */
2478 if (GET_CODE (size
) == CONST_INT
)
2479 size
= GEN_INT (INTVAL (size
) - used
);
2481 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
2482 GEN_INT (used
), NULL_RTX
, 0,
2486 /* Get the address of the stack space.
2487 In this case, we do not deal with EXTRA separately.
2488 A single stack adjust will do. */
2491 temp
= push_block (size
, extra
, where_pad
== downward
);
2494 else if (GET_CODE (args_so_far
) == CONST_INT
)
2495 temp
= memory_address (BLKmode
,
2496 plus_constant (args_addr
,
2497 skip
+ INTVAL (args_so_far
)));
2499 temp
= memory_address (BLKmode
,
2500 plus_constant (gen_rtx (PLUS
, Pmode
,
2501 args_addr
, args_so_far
),
2504 /* TEMP is the address of the block. Copy the data there. */
2505 if (GET_CODE (size
) == CONST_INT
2506 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
), align
)
2509 move_by_pieces (gen_rtx (MEM
, BLKmode
, temp
), xinner
,
2510 INTVAL (size
), align
);
2513 /* Try the most limited insn first, because there's no point
2514 including more than one in the machine description unless
2515 the more limited one has some advantage. */
2516 #ifdef HAVE_movstrqi
2518 && GET_CODE (size
) == CONST_INT
2519 && ((unsigned) INTVAL (size
)
2520 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
2522 rtx pat
= gen_movstrqi (gen_rtx (MEM
, BLKmode
, temp
),
2523 xinner
, size
, GEN_INT (align
));
2531 #ifdef HAVE_movstrhi
2533 && GET_CODE (size
) == CONST_INT
2534 && ((unsigned) INTVAL (size
)
2535 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
2537 rtx pat
= gen_movstrhi (gen_rtx (MEM
, BLKmode
, temp
),
2538 xinner
, size
, GEN_INT (align
));
2546 #ifdef HAVE_movstrsi
2549 rtx pat
= gen_movstrsi (gen_rtx (MEM
, BLKmode
, temp
),
2550 xinner
, size
, GEN_INT (align
));
2558 #ifdef HAVE_movstrdi
2561 rtx pat
= gen_movstrdi (gen_rtx (MEM
, BLKmode
, temp
),
2562 xinner
, size
, GEN_INT (align
));
2571 #ifndef ACCUMULATE_OUTGOING_ARGS
2572 /* If the source is referenced relative to the stack pointer,
2573 copy it to another register to stabilize it. We do not need
2574 to do this if we know that we won't be changing sp. */
2576 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
2577 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
2578 temp
= copy_to_reg (temp
);
2581 /* Make inhibit_defer_pop nonzero around the library call
2582 to force it to pop the bcopy-arguments right away. */
2584 #ifdef TARGET_MEM_FUNCTIONS
2585 emit_library_call (memcpy_libfunc
, 0,
2586 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
2587 convert_to_mode (TYPE_MODE (sizetype
),
2588 size
, TREE_UNSIGNED (sizetype
)),
2589 TYPE_MODE (sizetype
));
2591 emit_library_call (bcopy_libfunc
, 0,
2592 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
2593 convert_to_mode (TYPE_MODE (integer_type_node
),
2595 TREE_UNSIGNED (integer_type_node
)),
2596 TYPE_MODE (integer_type_node
));
2601 else if (partial
> 0)
2603 /* Scalar partly in registers. */
2605 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
2608 /* # words of start of argument
2609 that we must make space for but need not store. */
2610 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
2611 int args_offset
= INTVAL (args_so_far
);
2614 /* Push padding now if padding above and stack grows down,
2615 or if padding below and stack grows up.
2616 But if space already allocated, this has already been done. */
2617 if (extra
&& args_addr
== 0
2618 && where_pad
!= none
&& where_pad
!= stack_direction
)
2619 anti_adjust_stack (GEN_INT (extra
));
2621 /* If we make space by pushing it, we might as well push
2622 the real data. Otherwise, we can leave OFFSET nonzero
2623 and leave the space uninitialized. */
2627 /* Now NOT_STACK gets the number of words that we don't need to
2628 allocate on the stack. */
2629 not_stack
= partial
- offset
;
2631 /* If the partial register-part of the arg counts in its stack size,
2632 skip the part of stack space corresponding to the registers.
2633 Otherwise, start copying to the beginning of the stack space,
2634 by setting SKIP to 0. */
2635 #ifndef REG_PARM_STACK_SPACE
2641 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2642 x
= validize_mem (force_const_mem (mode
, x
));
2644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2645 SUBREGs of such registers are not allowed. */
2646 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
2647 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
2648 x
= copy_to_reg (x
);
2650 /* Loop over all the words allocated on the stack for this arg. */
2651 /* We can do it by words, because any scalar bigger than a word
2652 has a size a multiple of a word. */
2653 #ifndef PUSH_ARGS_REVERSED
2654 for (i
= not_stack
; i
< size
; i
++)
2656 for (i
= size
- 1; i
>= not_stack
; i
--)
2658 if (i
>= not_stack
+ offset
)
2659 emit_push_insn (operand_subword_force (x
, i
, mode
),
2660 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
2662 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
2663 * UNITS_PER_WORD
)));
2669 /* Push padding now if padding above and stack grows down,
2670 or if padding below and stack grows up.
2671 But if space already allocated, this has already been done. */
2672 if (extra
&& args_addr
== 0
2673 && where_pad
!= none
&& where_pad
!= stack_direction
)
2674 anti_adjust_stack (GEN_INT (extra
));
2676 #ifdef PUSH_ROUNDING
2678 addr
= gen_push_operand ();
2681 if (GET_CODE (args_so_far
) == CONST_INT
)
2683 = memory_address (mode
,
2684 plus_constant (args_addr
, INTVAL (args_so_far
)));
2686 addr
= memory_address (mode
, gen_rtx (PLUS
, Pmode
, args_addr
,
2689 emit_move_insn (gen_rtx (MEM
, mode
, addr
), x
);
2693 /* If part should go in registers, copy that part
2694 into the appropriate registers. Do this now, at the end,
2695 since mem-to-mem copies above may do function calls. */
2696 if (partial
> 0 && reg
!= 0)
2698 /* Handle calls that pass values in multiple non-contiguous locations.
2699 The Irix 6 ABI has examples of this. */
2700 if (GET_CODE (reg
) == PARALLEL
)
2701 emit_group_load (reg
, x
);
2703 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
2706 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
2707 anti_adjust_stack (GEN_INT (extra
));
/* NOTE(review): this chunk is a lossy extraction of GCC expr.c (ca. 1996).
   The leading integers on lines below are the ORIGINAL file's line numbers;
   many original lines (declarations, braces, else-arms, some comment
   terminators) are missing, so statements here may be incomplete.  The code
   has been left byte-identical; only comments were added.

   expand_assignment: emit RTL that stores the value of tree FROM into tree
   TO.  If WANT_VALUE is nonzero, returns an rtx for TO's value (possibly a
   QUEUED rtx or a constant); otherwise returns NULL_RTX.  SUGGEST_REG is
   unused (see the original comment below).  */
2710 /* Expand an assignment that stores the value of FROM into TO.
2711 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2712 (This may contain a QUEUED rtx;
2713 if the value is constant, this rtx is a constant.)
2714 Otherwise, the returned value is NULL_RTX.
2716 SUGGEST_REG is no longer actually used.
2717 It used to mean, copy the value through a register
2718 and return that register, if that is possible.
2719 We now use WANT_VALUE to decide whether to do this. */
2722 expand_assignment (to
, from
, want_value
, suggest_reg
)
2727 register rtx to_rtx
= 0;
2730 /* Don't crash if the lhs of the assignment was erroneous. */
2732 if (TREE_CODE (to
) == ERROR_MARK
)
2734 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2735 return want_value
? result
: NULL_RTX
;
/* Bytecode-interpreter path: evaluate FROM, duplicate it on the bc stack,
   then store through the address of TO (bc_store_memory also handles the
   bitfield case, per the original comment below).  */
2738 if (output_bytecode
)
2740 tree dest_innermost
;
2742 bc_expand_expr (from
);
2743 bc_emit_instruction (duplicate
);
2745 dest_innermost
= bc_expand_address (to
);
2747 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2748 take care of it here. */
2750 bc_store_memory (TREE_TYPE (to
), dest_innermost
);
2754 /* Assignment of a structure component needs special treatment
2755 if the structure component's rtx is not simply a MEM.
2756 Assignment of an array element at a constant index, and assignment of
2757 an array element in an unaligned packed structure field, has the same
2760 if (TREE_CODE (to
) == COMPONENT_REF
2761 || TREE_CODE (to
) == BIT_FIELD_REF
2762 || (TREE_CODE (to
) == ARRAY_REF
2763 && ((TREE_CODE (TREE_OPERAND (to
, 1)) == INTEGER_CST
2764 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to
))) == INTEGER_CST
)
2765 || (SLOW_UNALIGNED_ACCESS
&& get_inner_unaligned_p (to
)))))
2767 enum machine_mode mode1
;
2777 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
,
2778 &mode1
, &unsignedp
, &volatilep
);
2780 /* If we are going to use store_bit_field and extract_bit_field,
2781 make sure to_rtx will be safe for multiple use. */
2783 if (mode1
== VOIDmode
&& want_value
)
2784 tem
= stabilize_reference (tem
);
2786 alignment
= TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
;
2787 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
2790 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
2792 if (GET_CODE (to_rtx
) != MEM
)
2794 to_rtx
= change_address (to_rtx
, VOIDmode
,
2795 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
2796 force_reg (ptr_mode
, offset_rtx
)));
2797 /* If we have a variable offset, the known alignment
2798 is only that of the innermost structure containing the field.
2799 (Actually, we could sometimes do better by using the
2800 align of an element of the innermost array, but no need.) */
2801 if (TREE_CODE (to
) == COMPONENT_REF
2802 || TREE_CODE (to
) == BIT_FIELD_REF
)
2804 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to
, 0))) / BITS_PER_UNIT
;
2808 if (GET_CODE (to_rtx
) == MEM
)
2810 /* When the offset is zero, to_rtx is the address of the
2811 structure we are storing into, and hence may be shared.
2812 We must make a new MEM before setting the volatile bit. */
2814 to_rtx
= change_address (to_rtx
, VOIDmode
, XEXP (to_rtx
, 0));
2815 MEM_VOLATILE_P (to_rtx
) = 1;
2817 #if 0 /* This was turned off because, when a field is volatile
2818 in an object which is not volatile, the object may be in a register,
2819 and then we would abort over here. */
2825 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
2827 /* Spurious cast makes HPUX compiler happy. */
2828 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
2831 /* Required alignment of containing datum. */
2833 int_size_in_bytes (TREE_TYPE (tem
)));
2834 preserve_temp_slots (result
);
2838 /* If the value is meaningful, convert RESULT to the proper mode.
2839 Otherwise, return nothing. */
2840 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
2841 TYPE_MODE (TREE_TYPE (from
)),
2843 TREE_UNSIGNED (TREE_TYPE (to
)))
2847 /* If the rhs is a function call and its value is not an aggregate,
2848 call the function before we start to compute the lhs.
2849 This is needed for correct code for cases such as
2850 val = setjmp (buf) on machines where reference to val
2851 requires loading up part of an address in a separate insn.
2853 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2854 a promoted variable where the zero- or sign- extension needs to be done.
2855 Handling this in the normal way is safe because no computation is done
2857 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
2858 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
2859 && ! (TREE_CODE (to
) == VAR_DECL
&& GET_CODE (DECL_RTL (to
)) == REG
))
2864 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2866 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2868 /* Handle calls that return values in multiple non-contiguous locations.
2869 The Irix 6 ABI has examples of this. */
2870 if (GET_CODE (to_rtx
) == PARALLEL
)
2871 emit_group_load (to_rtx
, value
);
2872 else if (GET_MODE (to_rtx
) == BLKmode
)
2873 emit_block_move (to_rtx
, value
, expr_size (from
),
2874 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
2876 emit_move_insn (to_rtx
, value
);
2877 preserve_temp_slots (to_rtx
);
2880 return want_value
? to_rtx
: NULL_RTX
;
/* General case: expand TO once (unless already expanded above) and then
   store FROM into it via store_expr at the end of this function.  */
2883 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2884 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2887 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2889 /* Don't move directly into a return register. */
2890 if (TREE_CODE (to
) == RESULT_DECL
&& GET_CODE (to_rtx
) == REG
)
2895 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
2896 emit_move_insn (to_rtx
, temp
);
2897 preserve_temp_slots (to_rtx
);
2900 return want_value
? to_rtx
: NULL_RTX
;
/* NOTE(review): the following branch copies through a library call
   (memcpy or bcopy, depending on TARGET_MEM_FUNCTIONS) to be safe when
   source and destination may overlap — see the original comment.  */
2903 /* In case we are returning the contents of an object which overlaps
2904 the place the value is being stored, use a safe function when copying
2905 a value through a pointer into a structure value return block. */
2906 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
2907 && current_function_returns_struct
2908 && !current_function_returns_pcc_struct
)
2913 size
= expr_size (from
);
2914 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2916 #ifdef TARGET_MEM_FUNCTIONS
2917 emit_library_call (memcpy_libfunc
, 0,
2918 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
2919 XEXP (from_rtx
, 0), Pmode
,
2920 convert_to_mode (TYPE_MODE (sizetype
),
2921 size
, TREE_UNSIGNED (sizetype
)),
2922 TYPE_MODE (sizetype
));
2924 emit_library_call (bcopy_libfunc
, 0,
2925 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
2926 XEXP (to_rtx
, 0), Pmode
,
2927 convert_to_mode (TYPE_MODE (integer_type_node
),
2928 size
, TREE_UNSIGNED (integer_type_node
)),
2929 TYPE_MODE (integer_type_node
));
2932 preserve_temp_slots (to_rtx
);
2935 return want_value
? to_rtx
: NULL_RTX
;
2938 /* Compute FROM and store the value in the rtx we got. */
2941 result
= store_expr (from
, to_rtx
, want_value
);
2942 preserve_temp_slots (result
);
2945 return want_value
? result
: NULL_RTX
;
/* NOTE(review): lossy extraction — the leading integers are the ORIGINAL
   expr.c line numbers and many original lines are missing, so some
   statements and comment terminators below are incomplete.  Code left
   byte-identical; comments only added at lexically safe points.

   store_expr: emit RTL computing EXP and storing it into rtx TARGET.
   The contract for WANT_VALUE is spelled out in the original comment.  */
2948 /* Generate code for computing expression EXP,
2949 and storing the value into TARGET.
2950 TARGET may contain a QUEUED rtx.
2952 If WANT_VALUE is nonzero, return a copy of the value
2953 not in TARGET, so that we can be sure to use the proper
2954 value in a containing expression even if TARGET has something
2955 else stored in it. If possible, we copy the value through a pseudo
2956 and return that pseudo. Or, if the value is constant, we try to
2957 return the constant. In some cases, we return a pseudo
2958 copied *from* TARGET.
2960 If the mode is BLKmode then we may return TARGET itself.
2961 It turns out that in BLKmode it doesn't cause a problem.
2962 because C has no operators that could combine two different
2963 assignments into the same BLKmode object with different values
2964 with no sequence point. Will other languages need this to
2967 If WANT_VALUE is 0, we return NULL, to make sure
2968 to catch quickly any cases where the caller uses the value
2969 and fails to set WANT_VALUE. */
2972 store_expr (exp
, target
, want_value
)
2974 register rtx target
;
2978 int dont_return_target
= 0;
/* COMPOUND_EXPR: evaluate operand 0 for effect, then tail-recurse to store
   operand 1.  */
2980 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
2982 /* Perform first part of compound expression, then assign from second
2984 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
2986 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
2988 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
2990 /* For conditional expression, get safe form of the target. Then
2991 test the condition, doing the appropriate assignment on either
2992 side. This avoids the creation of unnecessary temporaries.
2993 For non-BLKmode, it is more efficient not to do this. */
2995 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
2996 rtx flag
= NULL_RTX
;
2997 tree left_cleanups
= NULL_TREE
;
2998 tree right_cleanups
= NULL_TREE
;
2999 tree old_cleanups
= cleanups_this_call
;
3001 /* Used to save a pointer to the place to put the setting of
3002 the flag that indicates if this side of the conditional was
3003 taken. We backpatch the code, if we find out later that we
3004 have any conditional cleanups that need to be performed. */
3005 rtx dest_right_flag
= NULL_RTX
;
3006 rtx dest_left_flag
= NULL_RTX
;
3009 target
= protect_from_queue (target
, 1);
3011 do_pending_stack_adjust ();
3013 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3014 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3015 dest_left_flag
= get_last_insn ();
3016 /* Handle conditional cleanups, if any. */
3017 left_cleanups
= defer_cleanups_to (old_cleanups
);
3019 emit_jump_insn (gen_jump (lab2
));
3022 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3023 dest_right_flag
= get_last_insn ();
3024 /* Handle conditional cleanups, if any. */
3025 right_cleanups
= defer_cleanups_to (old_cleanups
);
3030 /* Add back in any conditional cleanups. */
3031 if (left_cleanups
|| right_cleanups
)
3037 /* Now that we know that a flag is needed, go back and add in the
3038 setting of the flag. */
3040 flag
= gen_reg_rtx (word_mode
);
3042 /* Do the left side flag. */
3043 last
= get_last_insn ();
3044 /* Flag left cleanups as needed. */
3045 emit_move_insn (flag
, const1_rtx
);
3046 /* ??? deprecated, use sequences instead. */
3047 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_left_flag
);
3049 /* Do the right side flag. */
3050 last
= get_last_insn ();
3051 /* Flag left cleanups as needed. */
3052 emit_move_insn (flag
, const0_rtx
);
3053 /* ??? deprecated, use sequences instead. */
3054 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_right_flag
);
3056 /* All cleanups must be on the function_obstack. */
3057 push_obstacks_nochange ();
3058 resume_temporary_allocation ();
3060 /* convert flag, which is an rtx, into a tree. */
3061 cond
= make_node (RTL_EXPR
);
3062 TREE_TYPE (cond
) = integer_type_node
;
3063 RTL_EXPR_RTL (cond
) = flag
;
3064 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
3065 cond
= save_expr (cond
);
3067 if (! left_cleanups
)
3068 left_cleanups
= integer_zero_node
;
3069 if (! right_cleanups
)
3070 right_cleanups
= integer_zero_node
;
3071 new_cleanups
= build (COND_EXPR
, void_type_node
,
3072 truthvalue_conversion (cond
),
3073 left_cleanups
, right_cleanups
);
3074 new_cleanups
= fold (new_cleanups
);
3078 /* Now add in the conditionalized cleanups. */
3080 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
3081 expand_eh_region_start ();
3083 return want_value
? target
: NULL_RTX
;
3085 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3086 && GET_MODE (target
) != BLKmode
)
3087 /* If target is in memory and caller wants value in a register instead,
3088 arrange that. Pass TARGET as target for expand_expr so that,
3089 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3090 We know expand_expr will not use the target in that case.
3091 Don't do this if TARGET is volatile because we are supposed
3092 to write it and then read it. */
3094 temp
= expand_expr (exp
, cse_not_expected
? NULL_RTX
: target
,
3095 GET_MODE (target
), 0);
3096 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3097 temp
= copy_to_reg (temp
);
3098 dont_return_target
= 1;
3100 else if (queued_subexp_p (target
))
3101 /* If target contains a postincrement, let's not risk
3102 using it as the place to generate the rhs. */
3104 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3106 /* Expand EXP into a new pseudo. */
3107 temp
= gen_reg_rtx (GET_MODE (target
));
3108 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3111 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3113 /* If target is volatile, ANSI requires accessing the value
3114 *from* the target, if it is accessed. So make that happen.
3115 In no case return the target itself. */
3116 if (! MEM_VOLATILE_P (target
) && want_value
)
3117 dont_return_target
= 1;
3119 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3120 /* If this is an scalar in a register that is stored in a wider mode
3121 than the declared mode, compute the result into its declared mode
3122 and then convert to the wider mode. Our value is the computed
3125 /* If we don't want a value, we can do the conversion inside EXP,
3126 which will often result in some optimizations. Do the conversion
3127 in two steps: first change the signedness, if needed, then
3131 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3132 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3135 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3139 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3140 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3144 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3146 /* If TEMP is a volatile MEM and we want a result value, make
3147 the access now so it gets done only once. Likewise if
3148 it contains TARGET. */
3149 if (GET_CODE (temp
) == MEM
&& want_value
3150 && (MEM_VOLATILE_P (temp
)
3151 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3152 temp
= copy_to_reg (temp
);
/* A VOIDmode constant must be widened explicitly before convert_move.  */
3154 /* If TEMP is a VOIDmode constant, use convert_modes to make
3155 sure that we properly convert it. */
3156 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3157 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3158 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3159 SUBREG_PROMOTED_UNSIGNED_P (target
));
3161 convert_move (SUBREG_REG (target
), temp
,
3162 SUBREG_PROMOTED_UNSIGNED_P (target
));
3163 return want_value
? temp
: NULL_RTX
;
3167 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3168 /* Return TARGET if it's a specified hardware register.
3169 If TARGET is a volatile mem ref, either return TARGET
3170 or return a reg copied *from* TARGET; ANSI requires this.
3172 Otherwise, if TEMP is not TARGET, return TEMP
3173 if it is constant (for efficiency),
3174 or if we really want the correct value. */
3175 if (!(target
&& GET_CODE (target
) == REG
3176 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3177 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3179 && (CONSTANT_P (temp
) || want_value
))
3180 dont_return_target
= 1;
3183 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3184 the same as that of TARGET, adjust the constant. This is needed, for
3185 example, in case it is a CONST_DOUBLE and we want only a word-sized
3187 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3188 && TREE_CODE (exp
) != ERROR_MARK
3189 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3190 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3191 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3193 /* If value was not generated in the target, store it there.
3194 Convert the value to TARGET's type first if nec. */
3196 if (temp
!= target
&& TREE_CODE (exp
) != ERROR_MARK
)
3198 target
= protect_from_queue (target
, 1);
3199 if (GET_MODE (temp
) != GET_MODE (target
)
3200 && GET_MODE (temp
) != VOIDmode
)
3202 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3203 if (dont_return_target
)
3205 /* In this case, we will return TEMP,
3206 so make sure it has the proper mode.
3207 But don't forget to store the value into TARGET. */
3208 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3209 emit_move_insn (target
, temp
);
3212 convert_move (target
, temp
, unsignedp
);
3215 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3217 /* Handle copying a string constant into an array.
3218 The string constant may be shorter than the array.
3219 So copy just the string's actual length, and clear the rest. */
3223 /* Get the size of the data type of the string,
3224 which is actually the size of the target. */
3225 size
= expr_size (exp
);
3226 if (GET_CODE (size
) == CONST_INT
3227 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3228 emit_block_move (target
, temp
, size
,
3229 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3232 /* Compute the size of the data to copy from the string. */
3234 = size_binop (MIN_EXPR
,
3235 make_tree (sizetype
, size
),
3237 build_int_2 (TREE_STRING_LENGTH (exp
), 0)));
3238 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3242 /* Copy that much. */
3243 emit_block_move (target
, temp
, copy_size_rtx
,
3244 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3246 /* Figure out how much is left in TARGET that we have to clear.
3247 Do all calculations in ptr_mode. */
3249 addr
= XEXP (target
, 0);
3250 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
3252 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3254 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
3255 size
= plus_constant (size
, - TREE_STRING_LENGTH (exp
));
3259 addr
= force_reg (ptr_mode
, addr
);
3260 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
3261 copy_size_rtx
, NULL_RTX
, 0,
3264 size
= expand_binop (ptr_mode
, sub_optab
, size
,
3265 copy_size_rtx
, NULL_RTX
, 0,
3268 emit_cmp_insn (size
, const0_rtx
, LT
, NULL_RTX
,
3269 GET_MODE (size
), 0, 0);
3270 label
= gen_label_rtx ();
3271 emit_jump_insn (gen_blt (label
));
3274 if (size
!= const0_rtx
)
3276 #ifdef TARGET_MEM_FUNCTIONS
3277 emit_library_call (memset_libfunc
, 0, VOIDmode
, 3,
3279 const0_rtx
, TYPE_MODE (integer_type_node
),
3280 convert_to_mode (TYPE_MODE (sizetype
),
3282 TREE_UNSIGNED (sizetype
)),
3283 TYPE_MODE (sizetype
));
3285 emit_library_call (bzero_libfunc
, 0, VOIDmode
, 2,
3287 convert_to_mode (TYPE_MODE (integer_type_node
),
3289 TREE_UNSIGNED (integer_type_node
)),
3290 TYPE_MODE (integer_type_node
));
3298 /* Handle calls that return values in multiple non-contiguous locations.
3299 The Irix 6 ABI has examples of this. */
3300 else if (GET_CODE (target
) == PARALLEL
)
3301 emit_group_load (target
, temp
);
3302 else if (GET_MODE (temp
) == BLKmode
)
3303 emit_block_move (target
, temp
, expr_size (exp
),
3304 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3306 emit_move_insn (target
, temp
);
3309 /* If we don't want a value, return NULL_RTX. */
3313 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3314 ??? The latter test doesn't seem to make sense. */
3315 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
3318 /* Return TARGET itself if it is a hard register. */
3319 else if (want_value
&& GET_MODE (target
) != BLKmode
3320 && ! (GET_CODE (target
) == REG
3321 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3322 return copy_to_reg (target
);
/* NOTE(review): lossy extraction — the function header of is_zeros_p
   (original lines 3329-3335: name, parameter declaration, opening brace)
   and several case labels of the switch below are missing from this chunk.
   Code left byte-identical; comments only.

   is_zeros_p: predicate — nonzero iff tree EXP consists entirely of zeros
   (integer/real/complex constants, or a CONSTRUCTOR whose elements are all
   zero; for a SET_TYPE constructor, iff it has no elements).  */
3328 /* Return 1 if EXP just contains zeros. */
3336 switch (TREE_CODE (exp
))
3340 case NON_LVALUE_EXPR
:
3341 return is_zeros_p (TREE_OPERAND (exp
, 0));
3344 return TREE_INT_CST_LOW (exp
) == 0 && TREE_INT_CST_HIGH (exp
) == 0;
3348 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
3351 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp
), dconst0
);
/* Presumably the CONSTRUCTOR case — the case label line is missing from
   this extraction; TODO confirm against the original file.  */
3354 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3355 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
3356 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3357 if (! is_zeros_p (TREE_VALUE (elt
)))
/* NOTE(review): lossy extraction — parameter declaration, braces, and the
   loop body's counting statements (original lines around 3389-3392) are
   missing.  Code left byte-identical; comments only.

   mostly_zeros_p: heuristic predicate — nonzero iff at least 3/4 of the
   elements of CONSTRUCTOR EXP are (mostly) zero; falls back to is_zeros_p
   for non-CONSTRUCTOR trees.  Used to decide whether to pre-clear storage
   before filling in a constructor.  */
3366 /* Return 1 if EXP contains mostly (3/4) zeros. */
3369 mostly_zeros_p (exp
)
3372 if (TREE_CODE (exp
) == CONSTRUCTOR
)
3374 int elts
= 0, zeros
= 0;
3375 tree elt
= CONSTRUCTOR_ELTS (exp
);
3376 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3378 /* If there are no ranges of true bits, it is all zero. */
3379 return elt
== NULL_TREE
;
3381 for (; elt
; elt
= TREE_CHAIN (elt
))
3383 /* We do not handle the case where the index is a RANGE_EXPR,
3384 so the statistic will be somewhat inaccurate.
3385 We do make a more accurate count in store_constructor itself,
3386 so since this function is only used for nested array elements,
3387 this should be close enough. */
3388 if (mostly_zeros_p (TREE_VALUE (elt
)))
/* 75% threshold: 4*zeros >= 3*elts <=> zeros/elts >= 3/4.  */
3393 return 4 * zeros
>= 3 * elts
;
3396 return is_zeros_p (exp
);
/* NOTE(review): lossy extraction — some parameter declarations, braces,
   and the else-arm line between the two branches are missing.  Code left
   byte-identical; comments only.

   store_constructor_field: store EXP into the field of TARGET described by
   BITSIZE/BITPOS/MODE.  When EXP is itself a byte-aligned CONSTRUCTOR (and
   TARGET is a MEM or BITPOS is 0), recurses directly into
   store_constructor so the CLEARED flag can propagate; otherwise defers to
   store_field.  */
3399 /* Helper function for store_constructor.
3400 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3401 TYPE is the type of the CONSTRUCTOR, not the element type.
3402 CLEARED is as for store_constructor.
3404 This provides a recursive shortcut back to store_constructor when it isn't
3405 necessary to go through store_field. This is so that we can pass through
3406 the cleared field to let store_constructor know that we may not have to
3407 clear a substructure if the outer structure has already been cleared. */
3410 store_constructor_field (target
, bitsize
, bitpos
,
3411 mode
, exp
, type
, cleared
)
3413 int bitsize
, bitpos
;
3414 enum machine_mode mode
;
3418 if (TREE_CODE (exp
) == CONSTRUCTOR
3419 && bitpos
% BITS_PER_UNIT
== 0
3420 /* If we have a non-zero bitpos for a register target, then we just
3421 let store_field do the bitfield handling. This is unlikely to
3422 generate unnecessary clear instructions anyways. */
3423 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
3426 target
= change_address (target
, VOIDmode
,
3427 plus_constant (XEXP (target
, 0),
3428 bitpos
/ BITS_PER_UNIT
));
3429 store_constructor (exp
, target
, cleared
);
/* Fallback arm: defer to store_field.  NOTE(review): the `else` line
   (original 3431) is missing from this extraction — TODO confirm.  */
3432 store_field (target
, bitsize
, bitpos
, mode
, exp
,
3433 VOIDmode
, 0, TYPE_ALIGN (type
) / BITS_PER_UNIT
,
3434 int_size_in_bytes (type
));
3437 /* Store the value of constructor EXP into the rtx TARGET.
3438 TARGET is either a REG or a MEM.
3439 CLEARED is true if TARGET is known to have been zero'd. */
3442 store_constructor (exp
, target
, cleared
)
3447 tree type
= TREE_TYPE (exp
);
3449 /* We know our target cannot conflict, since safe_from_p has been called. */
3451 /* Don't try copying piece by piece into a hard register
3452 since that is vulnerable to being clobbered by EXP.
3453 Instead, construct in a pseudo register and then copy it all. */
3454 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3456 rtx temp
= gen_reg_rtx (GET_MODE (target
));
3457 store_constructor (exp
, temp
, 0);
3458 emit_move_insn (target
, temp
);
3463 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
3464 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3468 /* Inform later passes that the whole union value is dead. */
3469 if (TREE_CODE (type
) == UNION_TYPE
3470 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3471 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3473 /* If we are building a static constructor into a register,
3474 set the initial value as zero so we can fold the value into
3475 a constant. But if more than one register is involved,
3476 this probably loses. */
3477 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
3478 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
3481 emit_move_insn (target
, const0_rtx
);
3486 /* If the constructor has fewer fields than the structure
3487 or if we are initializing the structure to mostly zeros,
3488 clear the whole structure first. */
3489 else if ((list_length (CONSTRUCTOR_ELTS (exp
))
3490 != list_length (TYPE_FIELDS (type
)))
3491 || mostly_zeros_p (exp
))
3494 clear_storage (target
, expr_size (exp
),
3495 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3500 /* Inform later passes that the old value is dead. */
3501 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3503 /* Store each element of the constructor into
3504 the corresponding field of TARGET. */
3506 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3508 register tree field
= TREE_PURPOSE (elt
);
3509 register enum machine_mode mode
;
3513 tree pos
, constant
= 0, offset
= 0;
3514 rtx to_rtx
= target
;
3516 /* Just ignore missing fields.
3517 We cleared the whole structure, above,
3518 if any fields are missing. */
3522 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
3525 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
3526 unsignedp
= TREE_UNSIGNED (field
);
3527 mode
= DECL_MODE (field
);
3528 if (DECL_BIT_FIELD (field
))
3531 pos
= DECL_FIELD_BITPOS (field
);
3532 if (TREE_CODE (pos
) == INTEGER_CST
)
3534 else if (TREE_CODE (pos
) == PLUS_EXPR
3535 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
3536 constant
= TREE_OPERAND (pos
, 1), offset
= TREE_OPERAND (pos
, 0);
3541 bitpos
= TREE_INT_CST_LOW (constant
);
3547 if (contains_placeholder_p (offset
))
3548 offset
= build (WITH_RECORD_EXPR
, sizetype
,
3551 offset
= size_binop (FLOOR_DIV_EXPR
, offset
,
3552 size_int (BITS_PER_UNIT
));
3554 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3555 if (GET_CODE (to_rtx
) != MEM
)
3559 = change_address (to_rtx
, VOIDmode
,
3560 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
3561 force_reg (ptr_mode
, offset_rtx
)));
3563 if (TREE_READONLY (field
))
3565 if (GET_CODE (to_rtx
) == MEM
)
3566 to_rtx
= change_address (to_rtx
, GET_MODE (to_rtx
),
3568 RTX_UNCHANGING_P (to_rtx
) = 1;
3571 store_constructor_field (to_rtx
, bitsize
, bitpos
,
3572 mode
, TREE_VALUE (elt
), type
, cleared
);
3575 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3580 tree domain
= TYPE_DOMAIN (type
);
3581 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
3582 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
3583 tree elttype
= TREE_TYPE (type
);
3585 /* If the constructor has fewer elements than the array,
3586 clear the whole array first. Similarly if this is a
3587 static constructor of a non-BLKmode object. */
3588 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
3592 HOST_WIDE_INT count
= 0, zero_count
= 0;
3594 /* This loop is a more accurate version of the loop in
3595 mostly_zeros_p (it handles RANGE_EXPR in an index).
3596 It is also needed to check for missing elements. */
3597 for (elt
= CONSTRUCTOR_ELTS (exp
);
3599 elt
= TREE_CHAIN (elt
), i
++)
3601 tree index
= TREE_PURPOSE (elt
);
3602 HOST_WIDE_INT this_node_count
;
3603 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3605 tree lo_index
= TREE_OPERAND (index
, 0);
3606 tree hi_index
= TREE_OPERAND (index
, 1);
3607 if (TREE_CODE (lo_index
) != INTEGER_CST
3608 || TREE_CODE (hi_index
) != INTEGER_CST
)
3613 this_node_count
= TREE_INT_CST_LOW (hi_index
)
3614 - TREE_INT_CST_LOW (lo_index
) + 1;
3617 this_node_count
= 1;
3618 count
+= this_node_count
;
3619 if (mostly_zeros_p (TREE_VALUE (elt
)))
3620 zero_count
+= this_node_count
;
3622 /* Clear the entire array first if there are any missing elements,
3623 or if the incidence of zero elements is >= 75%. */
3624 if (count
< maxelt
- minelt
+ 1
3625 || 4 * zero_count
>= 3 * count
)
3631 clear_storage (target
, expr_size (exp
),
3632 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3636 /* Inform later passes that the old value is dead. */
3637 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3639 /* Store each element of the constructor into
3640 the corresponding element of TARGET, determined
3641 by counting the elements. */
3642 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
3644 elt
= TREE_CHAIN (elt
), i
++)
3646 register enum machine_mode mode
;
3650 tree value
= TREE_VALUE (elt
);
3651 tree index
= TREE_PURPOSE (elt
);
3652 rtx xtarget
= target
;
3654 if (cleared
&& is_zeros_p (value
))
3657 mode
= TYPE_MODE (elttype
);
3658 bitsize
= GET_MODE_BITSIZE (mode
);
3659 unsignedp
= TREE_UNSIGNED (elttype
);
3661 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3663 tree lo_index
= TREE_OPERAND (index
, 0);
3664 tree hi_index
= TREE_OPERAND (index
, 1);
3665 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
3666 struct nesting
*loop
;
3667 HOST_WIDE_INT lo
, hi
, count
;
3670 /* If the range is constant and "small", unroll the loop. */
3671 if (TREE_CODE (lo_index
) == INTEGER_CST
3672 && TREE_CODE (hi_index
) == INTEGER_CST
3673 && (lo
= TREE_INT_CST_LOW (lo_index
),
3674 hi
= TREE_INT_CST_LOW (hi_index
),
3675 count
= hi
- lo
+ 1,
3676 (GET_CODE (target
) != MEM
3678 || (TREE_CODE (TYPE_SIZE (elttype
)) == INTEGER_CST
3679 && TREE_INT_CST_LOW (TYPE_SIZE (elttype
)) * count
3682 lo
-= minelt
; hi
-= minelt
;
3683 for (; lo
<= hi
; lo
++)
3685 bitpos
= lo
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
));
3686 store_constructor_field (target
, bitsize
, bitpos
,
3687 mode
, value
, type
, cleared
);
3692 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
3693 loop_top
= gen_label_rtx ();
3694 loop_end
= gen_label_rtx ();
3696 unsignedp
= TREE_UNSIGNED (domain
);
3698 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
3700 DECL_RTL (index
) = index_r
3701 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
3704 if (TREE_CODE (value
) == SAVE_EXPR
3705 && SAVE_EXPR_RTL (value
) == 0)
3707 /* Make sure value gets expanded once before the
3709 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
3712 store_expr (lo_index
, index_r
, 0);
3713 loop
= expand_start_loop (0);
3715 /* Assign value to element index. */
3716 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3717 size_int (BITS_PER_UNIT
));
3718 position
= size_binop (MULT_EXPR
,
3719 size_binop (MINUS_EXPR
, index
,
3720 TYPE_MIN_VALUE (domain
)),
3722 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3723 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3724 xtarget
= change_address (target
, mode
, addr
);
3725 if (TREE_CODE (value
) == CONSTRUCTOR
)
3726 store_constructor (value
, xtarget
, cleared
);
3728 store_expr (value
, xtarget
, 0);
3730 expand_exit_loop_if_false (loop
,
3731 build (LT_EXPR
, integer_type_node
,
3734 expand_increment (build (PREINCREMENT_EXPR
,
3736 index
, integer_one_node
), 0, 0);
3738 emit_label (loop_end
);
3740 /* Needed by stupid register allocation, to extend the
3741 lifetime of pseudo-regs used by target past the end
3743 emit_insn (gen_rtx (USE
, GET_MODE (target
), target
));
3746 else if ((index
!= 0 && TREE_CODE (index
) != INTEGER_CST
)
3747 || TREE_CODE (TYPE_SIZE (elttype
)) != INTEGER_CST
)
3753 index
= size_int (i
);
3756 index
= size_binop (MINUS_EXPR
, index
,
3757 TYPE_MIN_VALUE (domain
));
3758 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3759 size_int (BITS_PER_UNIT
));
3760 position
= size_binop (MULT_EXPR
, index
, position
);
3761 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3762 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3763 xtarget
= change_address (target
, mode
, addr
);
3764 store_expr (value
, xtarget
, 0);
3769 bitpos
= ((TREE_INT_CST_LOW (index
) - minelt
)
3770 * TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3772 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3773 store_constructor_field (target
, bitsize
, bitpos
,
3774 mode
, value
, type
, cleared
);
3778 /* set constructor assignments */
3779 else if (TREE_CODE (type
) == SET_TYPE
)
3781 tree elt
= CONSTRUCTOR_ELTS (exp
);
3782 rtx xtarget
= XEXP (target
, 0);
3783 int set_word_size
= TYPE_ALIGN (type
);
3784 int nbytes
= int_size_in_bytes (type
), nbits
;
3785 tree domain
= TYPE_DOMAIN (type
);
3786 tree domain_min
, domain_max
, bitlength
;
3788 /* The default implementation strategy is to extract the constant
3789 parts of the constructor, use that to initialize the target,
3790 and then "or" in whatever non-constant ranges we need in addition.
3792 If a large set is all zero or all ones, it is
3793 probably better to set it using memset (if available) or bzero.
3794 Also, if a large set has just a single range, it may also be
3795 better to first clear the whole set (using
3796 bzero/memset), and then set the bits we want. */
3798 /* Check for all zeros. */
3799 if (elt
== NULL_TREE
)
3802 clear_storage (target
, expr_size (exp
),
3803 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3807 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
3808 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
3809 bitlength
= size_binop (PLUS_EXPR
,
3810 size_binop (MINUS_EXPR
, domain_max
, domain_min
),
3813 if (nbytes
< 0 || TREE_CODE (bitlength
) != INTEGER_CST
)
3815 nbits
= TREE_INT_CST_LOW (bitlength
);
3817 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3818 are "complicated" (more than one range), initialize (the
3819 constant parts) by copying from a constant. */
3820 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
3821 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
3823 int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
3824 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
3825 char *bit_buffer
= (char *) alloca (nbits
);
3826 HOST_WIDE_INT word
= 0;
3829 int offset
= 0; /* In bytes from beginning of set. */
3830 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
3833 if (bit_buffer
[ibit
])
3835 if (BYTES_BIG_ENDIAN
)
3836 word
|= (1 << (set_word_size
- 1 - bit_pos
));
3838 word
|= 1 << bit_pos
;
3841 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
3843 if (word
!= 0 || ! cleared
)
3845 rtx datum
= GEN_INT (word
);
3847 /* The assumption here is that it is safe to use
3848 XEXP if the set is multi-word, but not if
3849 it's single-word. */
3850 if (GET_CODE (target
) == MEM
)
3852 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
3853 to_rtx
= change_address (target
, mode
, to_rtx
);
3855 else if (offset
== 0)
3859 emit_move_insn (to_rtx
, datum
);
3865 offset
+= set_word_size
/ BITS_PER_UNIT
;
3871 /* Don't bother clearing storage if the set is all ones. */
3872 if (TREE_CHAIN (elt
) != NULL_TREE
3873 || (TREE_PURPOSE (elt
) == NULL_TREE
3875 : (TREE_CODE (TREE_VALUE (elt
)) != INTEGER_CST
3876 || TREE_CODE (TREE_PURPOSE (elt
)) != INTEGER_CST
3877 || (TREE_INT_CST_LOW (TREE_VALUE (elt
))
3878 - TREE_INT_CST_LOW (TREE_PURPOSE (elt
)) + 1
3880 clear_storage (target
, expr_size (exp
),
3881 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3884 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
3886 /* start of range of element or NULL */
3887 tree startbit
= TREE_PURPOSE (elt
);
3888 /* end of range of element, or element value */
3889 tree endbit
= TREE_VALUE (elt
);
3890 HOST_WIDE_INT startb
, endb
;
3891 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
3893 bitlength_rtx
= expand_expr (bitlength
,
3894 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
3896 /* handle non-range tuple element like [ expr ] */
3897 if (startbit
== NULL_TREE
)
3899 startbit
= save_expr (endbit
);
3902 startbit
= convert (sizetype
, startbit
);
3903 endbit
= convert (sizetype
, endbit
);
3904 if (! integer_zerop (domain_min
))
3906 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
3907 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
3909 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
3910 EXPAND_CONST_ADDRESS
);
3911 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
3912 EXPAND_CONST_ADDRESS
);
3916 targetx
= assign_stack_temp (GET_MODE (target
),
3917 GET_MODE_SIZE (GET_MODE (target
)),
3919 emit_move_insn (targetx
, target
);
3921 else if (GET_CODE (target
) == MEM
)
3926 #ifdef TARGET_MEM_FUNCTIONS
3927 /* Optimization: If startbit and endbit are
3928 constants divisible by BITS_PER_UNIT,
3929 call memset instead. */
3930 if (TREE_CODE (startbit
) == INTEGER_CST
3931 && TREE_CODE (endbit
) == INTEGER_CST
3932 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
3933 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
3935 emit_library_call (memset_libfunc
, 0,
3937 plus_constant (XEXP (targetx
, 0),
3938 startb
/ BITS_PER_UNIT
),
3940 constm1_rtx
, TYPE_MODE (integer_type_node
),
3941 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
3942 TYPE_MODE (sizetype
));
3947 emit_library_call (gen_rtx (SYMBOL_REF
, Pmode
, "__setbits"),
3948 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
3949 bitlength_rtx
, TYPE_MODE (sizetype
),
3950 startbit_rtx
, TYPE_MODE (sizetype
),
3951 endbit_rtx
, TYPE_MODE (sizetype
));
3954 emit_move_insn (target
, targetx
);
3962 /* Store the value of EXP (an expression tree)
3963 into a subfield of TARGET which has mode MODE and occupies
3964 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3965 If MODE is VOIDmode, it means that we are storing into a bit-field.
3967 If VALUE_MODE is VOIDmode, return nothing in particular.
3968 UNSIGNEDP is not used in this case.
3970 Otherwise, return an rtx for the value stored. This rtx
3971 has mode VALUE_MODE if that is convenient to do.
3972 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3974 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3975 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3978 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
3979 unsignedp
, align
, total_size
)
3981 int bitsize
, bitpos
;
3982 enum machine_mode mode
;
3984 enum machine_mode value_mode
;
3989 HOST_WIDE_INT width_mask
= 0;
3991 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
3992 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
3994 /* If we are storing into an unaligned field of an aligned union that is
3995 in a register, we may have the mode of TARGET being an integer mode but
3996 MODE == BLKmode. In that case, get an aligned object whose size and
3997 alignment are the same as TARGET and store TARGET into it (we can avoid
3998 the store if the field being stored is the entire width of TARGET). Then
3999 call ourselves recursively to store the field into a BLKmode version of
4000 that object. Finally, load from the object into TARGET. This is not
4001 very efficient in general, but should only be slightly more expensive
4002 than the otherwise-required unaligned accesses. Perhaps this can be
4003 cleaned up later. */
4006 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4008 rtx object
= assign_stack_temp (GET_MODE (target
),
4009 GET_MODE_SIZE (GET_MODE (target
)), 0);
4010 rtx blk_object
= copy_rtx (object
);
4012 MEM_IN_STRUCT_P (object
) = 1;
4013 MEM_IN_STRUCT_P (blk_object
) = 1;
4014 PUT_MODE (blk_object
, BLKmode
);
4016 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4017 emit_move_insn (object
, target
);
4019 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4022 /* Even though we aren't returning target, we need to
4023 give it the updated value. */
4024 emit_move_insn (target
, object
);
4029 /* If the structure is in a register or if the component
4030 is a bit field, we cannot use addressing to access it.
4031 Use bit-field techniques or SUBREG to store in it. */
4033 if (mode
== VOIDmode
4034 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
4035 || GET_CODE (target
) == REG
4036 || GET_CODE (target
) == SUBREG
4037 /* If the field isn't aligned enough to store as an ordinary memref,
4038 store it as a bit field. */
4039 || (SLOW_UNALIGNED_ACCESS
4040 && align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
))
4041 || (SLOW_UNALIGNED_ACCESS
&& bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
4043 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4045 /* If BITSIZE is narrower than the size of the type of EXP
4046 we will be narrowing TEMP. Normally, what's wanted are the
4047 low-order bits. However, if EXP's type is a record and this is
4048 big-endian machine, we want the upper BITSIZE bits. */
4049 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4050 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4051 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4052 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4053 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4057 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4059 if (mode
!= VOIDmode
&& mode
!= BLKmode
4060 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4061 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4063 /* If the modes of TARGET and TEMP are both BLKmode, both
4064 must be in memory and BITPOS must be aligned on a byte
4065 boundary. If so, we simply do a block copy. */
4066 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4068 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4069 || bitpos
% BITS_PER_UNIT
!= 0)
4072 target
= change_address (target
, VOIDmode
,
4073 plus_constant (XEXP (target
, 0),
4074 bitpos
/ BITS_PER_UNIT
));
4076 emit_block_move (target
, temp
,
4077 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4081 return value_mode
== VOIDmode
? const0_rtx
: target
;
4084 /* Store the value in the bitfield. */
4085 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4086 if (value_mode
!= VOIDmode
)
4088 /* The caller wants an rtx for the value. */
4089 /* If possible, avoid refetching from the bitfield itself. */
4091 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4094 enum machine_mode tmode
;
4097 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
4098 tmode
= GET_MODE (temp
);
4099 if (tmode
== VOIDmode
)
4101 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4102 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4103 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4105 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4106 NULL_RTX
, value_mode
, 0, align
,
4113 rtx addr
= XEXP (target
, 0);
4116 /* If a value is wanted, it must be the lhs;
4117 so make the address stable for multiple use. */
4119 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
4120 && ! CONSTANT_ADDRESS_P (addr
)
4121 /* A frame-pointer reference is already stable. */
4122 && ! (GET_CODE (addr
) == PLUS
4123 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
4124 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
4125 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
4126 addr
= copy_to_reg (addr
);
4128 /* Now build a reference to just the desired component. */
4130 to_rtx
= change_address (target
, mode
,
4131 plus_constant (addr
, (bitpos
/ BITS_PER_UNIT
)));
4132 MEM_IN_STRUCT_P (to_rtx
) = 1;
4134 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
/* NOTE(review): extraction dropped brace/blank lines here; tokens are
   preserved verbatim, comments only added.  */
4138 /* Return true if any object containing the innermost array is an unaligned
4139 packed structure field. */
4142 get_inner_unaligned_p (exp
)
4145 int needed_alignment
= TYPE_ALIGN (TREE_TYPE (exp
));
/* Walk down the chain of component/array references via operand 0,
   looking for a containing record whose alignment is below what the
   innermost array type requires.  */
4149 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4151 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
/* Stop descending at anything that is not a reference or a
   mode-preserving conversion.  */
4155 else if (TREE_CODE (exp
) != ARRAY_REF
4156 && TREE_CODE (exp
) != NON_LVALUE_EXPR
4157 && ! ((TREE_CODE (exp
) == NOP_EXPR
4158 || TREE_CODE (exp
) == CONVERT_EXPR
)
4159 && (TYPE_MODE (TREE_TYPE (exp
))
4160 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4163 exp
= TREE_OPERAND (exp
, 0);
/* NOTE(review): extraction dropped brace/blank lines and split statements
   in this function; tokens below are preserved verbatim.  */
4169 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4170 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4171 ARRAY_REFs and find the ultimate containing object, which we return.
4173 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4174 bit position, and *PUNSIGNEDP to the signedness of the field.
4175 If the position of the field is variable, we store a tree
4176 giving the variable offset (in units) in *POFFSET.
4177 This offset is in addition to the bit position.
4178 If the position is not variable, we store 0 in *POFFSET.
4180 If any of the extraction expressions is volatile,
4181 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4183 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4184 is a mode that can be used to access the field. In that case, *PBITSIZE
4187 If the field describes a variable-sized object, *PMODE is set to
4188 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4189 this case, but the address of the object can be found. */
4192 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
4193 punsignedp
, pvolatilep
)
4198 enum machine_mode
*pmode
;
4202 tree orig_exp
= exp
;
4204 enum machine_mode mode
= VOIDmode
;
/* OFFSET accumulates the variable part of the position, in units.  */
4205 tree offset
= integer_zero_node
;
/* First determine the size (and mode/signedness) of the outermost
   reference itself.  */
4207 if (TREE_CODE (exp
) == COMPONENT_REF
)
4209 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
4210 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
4211 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
4212 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
4214 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
4216 size_tree
= TREE_OPERAND (exp
, 1);
4217 *punsignedp
= TREE_UNSIGNED (exp
);
4221 mode
= TYPE_MODE (TREE_TYPE (exp
));
4222 *pbitsize
= GET_MODE_BITSIZE (mode
);
4223 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
/* A non-constant size means a variable-sized field: report BLKmode
   and -1 per the contract above.  */
4228 if (TREE_CODE (size_tree
) != INTEGER_CST
)
4229 mode
= BLKmode
, *pbitsize
= -1;
4231 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
4234 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4235 and find the ultimate containing object. */
4241 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4243 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
4244 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
4245 : TREE_OPERAND (exp
, 2));
4246 tree constant
= integer_zero_node
, var
= pos
;
4248 /* If this field hasn't been filled in yet, don't go
4249 past it. This should only happen when folding expressions
4250 made during type construction. */
4254 /* Assume here that the offset is a multiple of a unit.
4255 If not, there should be an explicitly added constant. */
4256 if (TREE_CODE (pos
) == PLUS_EXPR
4257 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4258 constant
= TREE_OPERAND (pos
, 1), var
= TREE_OPERAND (pos
, 0);
4259 else if (TREE_CODE (pos
) == INTEGER_CST
)
4260 constant
= pos
, var
= integer_zero_node
;
4262 *pbitpos
+= TREE_INT_CST_LOW (constant
);
4263 offset
= size_binop (PLUS_EXPR
, offset
,
4264 size_binop (EXACT_DIV_EXPR
, var
,
4265 size_int (BITS_PER_UNIT
)));
4268 else if (TREE_CODE (exp
) == ARRAY_REF
)
4270 /* This code is based on the code in case ARRAY_REF in expand_expr
4271 below. We assume here that the size of an array element is
4272 always an integral multiple of BITS_PER_UNIT. */
4274 tree index
= TREE_OPERAND (exp
, 1);
4275 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4277 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4278 tree index_type
= TREE_TYPE (index
);
/* Rebase the index at zero before scaling by the element size.  */
4280 if (! integer_zerop (low_bound
))
4281 index
= fold (build (MINUS_EXPR
, index_type
, index
, low_bound
));
4283 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4285 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4287 index_type
= TREE_TYPE (index
);
4290 index
= fold (build (MULT_EXPR
, index_type
, index
,
4291 TYPE_SIZE (TREE_TYPE (exp
))));
/* A small constant index contributes to the constant bit position;
   anything else goes into the variable OFFSET.  */
4293 if (TREE_CODE (index
) == INTEGER_CST
4294 && TREE_INT_CST_HIGH (index
) == 0)
4295 *pbitpos
+= TREE_INT_CST_LOW (index
);
4297 offset
= size_binop (PLUS_EXPR
, offset
,
4298 size_binop (FLOOR_DIV_EXPR
, index
,
4299 size_int (BITS_PER_UNIT
)));
4301 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
4302 && ! ((TREE_CODE (exp
) == NOP_EXPR
4303 || TREE_CODE (exp
) == CONVERT_EXPR
)
4304 && ! (TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
4305 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4307 && (TYPE_MODE (TREE_TYPE (exp
))
4308 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4311 /* If any reference in the chain is volatile, the effect is volatile. */
4312 if (TREE_THIS_VOLATILE (exp
))
4314 exp
= TREE_OPERAND (exp
, 0);
4317 if (integer_zerop (offset
))
/* A PLACEHOLDER_EXPR inside OFFSET must be wrapped so it can later be
   resolved against the original containing object.  */
4320 if (offset
!= 0 && contains_placeholder_p (offset
))
4321 offset
= build (WITH_RECORD_EXPR
, sizetype
, offset
, orig_exp
);
/* NOTE(review): extraction split statements and dropped some lines here;
   tokens below are preserved verbatim, comments only added.  */
4328 /* Given an rtx VALUE that may contain additions and multiplications,
4329 return an equivalent value that just refers to a register or memory.
4330 This is done by generating instructions to perform the arithmetic
4331 and returning a pseudo-register containing the value.
4333 The returned value may be a REG, SUBREG, MEM or constant. */
4336 force_operand (value
, target
)
4339 register optab binoptab
= 0;
4340 /* Use a temporary to force order of execution of calls to
4344 /* Use subtarget as the target for operand 0 of a binary operation. */
4345 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
4347 if (GET_CODE (value
) == PLUS
)
4348 binoptab
= add_optab
;
4349 else if (GET_CODE (value
) == MINUS
)
4350 binoptab
= sub_optab
;
4351 else if (GET_CODE (value
) == MULT
)
4353 op2
= XEXP (value
, 1);
/* Force operand 1 too, unless it is already a constant or a register
   distinct from SUBTARGET.  */
4354 if (!CONSTANT_P (op2
)
4355 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
4357 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4358 return expand_mult (GET_MODE (value
), tmp
,
4359 force_operand (op2
, NULL_RTX
),
4365 op2
= XEXP (value
, 1);
4366 if (!CONSTANT_P (op2
)
4367 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
/* Canonicalize (MINUS x const) into an addition of the negated
   constant.  */
4369 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
4371 binoptab
= add_optab
;
4372 op2
= negate_rtx (GET_MODE (value
), op2
);
4375 /* Check for an addition with OP2 a constant integer and our first
4376 operand a PLUS of a virtual register and something else. In that
4377 case, we want to emit the sum of the virtual register and the
4378 constant first and then add the other value. This allows virtual
4379 register instantiation to simply modify the constant rather than
4380 creating another one around this addition. */
4381 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
4382 && GET_CODE (XEXP (value
, 0)) == PLUS
4383 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
4384 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4385 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
4387 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
4388 XEXP (XEXP (value
, 0), 0), op2
,
4389 subtarget
, 0, OPTAB_LIB_WIDEN
);
4390 return expand_binop (GET_MODE (value
), binoptab
, temp
,
4391 force_operand (XEXP (XEXP (value
, 0), 1), 0),
4392 target
, 0, OPTAB_LIB_WIDEN
);
/* General case: force both operands and emit the binary operation.  */
4395 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4396 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
4397 force_operand (op2
, NULL_RTX
),
4398 target
, 0, OPTAB_LIB_WIDEN
);
4399 /* We give UNSIGNEDP = 0 to expand_binop
4400 because the only operations we are expanding here are signed ones. */
/* NOTE(review): extraction split statements here; tokens preserved verbatim.  */
4405 /* Subroutine of expand_expr:
4406 save the non-copied parts (LIST) of an expr (LHS), and return a list
4407 which can restore these values to their previous values,
4408 should something modify their storage. */
4411 save_noncopied_parts (lhs
, list
)
/* LIST may contain nested TREE_LISTs; recurse into those, otherwise
   each element names a field (part) of LHS to be saved.  */
4418 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4419 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4420 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4423 tree part
= TREE_VALUE (tail
);
4424 tree part_type
= TREE_TYPE (part
);
4425 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
/* Stack temporary that will hold the saved copy of this part.  */
4426 rtx target
= assign_temp (part_type
, 0, 1, 1);
4427 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
4428 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
4429 parts
= tree_cons (to_be_saved
,
4430 build (RTL_EXPR
, part_type
, NULL_TREE
,
4433 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
/* NOTE(review): extraction split statements here; tokens preserved verbatim.  */
4438 /* Subroutine of expand_expr:
4439 record the non-copied parts (LIST) of an expr (LHS), and return a list
4440 which specifies the initial values of these parts. */
4443 init_noncopied_parts (lhs
, list
)
/* Mirror of save_noncopied_parts: recurse into nested TREE_LISTs,
   and for each part build a COMPONENT_REF of LHS describing it.  */
4450 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4451 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4452 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4455 tree part
= TREE_VALUE (tail
);
4456 tree part_type
= TREE_TYPE (part
);
4457 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
4458 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
/* NOTE(review): extraction dropped lines and split statements in this
   function; tokens below are preserved verbatim, comments only added.  */
4463 /* Subroutine of expand_expr: return nonzero iff there is no way that
4464 EXP can reference X, which is being modified. */
4467 safe_from_p (x
, exp
)
4475 /* If EXP has varying size, we MUST use a target since we currently
4476 have no way of allocating temporaries of variable size
4477 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4478 So we assume here that something at a higher level has prevented a
4479 clash. This is somewhat bogus, but the best we can do. Only
4480 do this when X is BLKmode. */
4481 || (TREE_TYPE (exp
) != 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4482 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
4483 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
4484 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
4485 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
4487 && GET_MODE (x
) == BLKmode
))
4490 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4491 find the underlying pseudo. */
4492 if (GET_CODE (x
) == SUBREG
)
4495 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4499 /* If X is a location in the outgoing argument area, it is always safe. */
4500 if (GET_CODE (x
) == MEM
4501 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
4502 || (GET_CODE (XEXP (x
, 0)) == PLUS
4503 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
/* Dispatch on the class of tree code: declarations, constants,
   references, expressions.  */
4506 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4509 exp_rtl
= DECL_RTL (exp
);
4516 if (TREE_CODE (exp
) == TREE_LIST
)
4517 return ((TREE_VALUE (exp
) == 0
4518 || safe_from_p (x
, TREE_VALUE (exp
)))
4519 && (TREE_CHAIN (exp
) == 0
4520 || safe_from_p (x
, TREE_CHAIN (exp
))));
4525 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4529 return (safe_from_p (x
, TREE_OPERAND (exp
, 0))
4530 && safe_from_p (x
, TREE_OPERAND (exp
, 1)));
4534 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4535 the expression. If it is set, we conflict iff we are that rtx or
4536 both are in memory. Otherwise, we check all operands of the
4537 expression recursively. */
4539 switch (TREE_CODE (exp
))
4542 return (staticp (TREE_OPERAND (exp
, 0))
4543 || safe_from_p (x
, TREE_OPERAND (exp
, 0)));
4546 if (GET_CODE (x
) == MEM
)
4551 exp_rtl
= CALL_EXPR_RTL (exp
);
4554 /* Assume that the call will clobber all hard registers and
4556 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4557 || GET_CODE (x
) == MEM
)
4564 /* If a sequence exists, we would have to scan every instruction
4565 in the sequence to see if it was safe. This is probably not
4567 if (RTL_EXPR_SEQUENCE (exp
))
4570 exp_rtl
= RTL_EXPR_RTL (exp
);
4573 case WITH_CLEANUP_EXPR
:
4574 exp_rtl
= RTL_EXPR_RTL (exp
);
4577 case CLEANUP_POINT_EXPR
:
4578 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4581 exp_rtl
= SAVE_EXPR_RTL (exp
);
4585 /* The only operand we look at is operand 1. The rest aren't
4586 part of the expression. */
4587 return safe_from_p (x
, TREE_OPERAND (exp
, 1));
4589 case METHOD_CALL_EXPR
:
4590 /* This takes a rtx argument, but shouldn't appear here. */
4594 /* If we have an rtx, we do not need to scan our operands. */
/* Default: recursively check every operand of EXP.  */
4598 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
4599 for (i
= 0; i
< nops
; i
++)
4600 if (TREE_OPERAND (exp
, i
) != 0
4601 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
)))
4605 /* If we have an rtl, find any enclosed object. Then see if we conflict
4609 if (GET_CODE (exp_rtl
) == SUBREG
)
4611 exp_rtl
= SUBREG_REG (exp_rtl
);
4612 if (GET_CODE (exp_rtl
) == REG
4613 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
4617 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4618 are memory and EXP is not readonly. */
4619 return ! (rtx_equal_p (x
, exp_rtl
)
4620 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
4621 && ! TREE_READONLY (exp
)));
4624 /* If we reach here, it is safe. */
/* NOTE(review): the function header between this comment and the test
   below was lost in extraction; remaining tokens preserved verbatim.  */
4628 /* Subroutine of expand_expr: return nonzero iff EXP is an
4629 expression whose type is statically determinable. */
4635 if (TREE_CODE (exp
) == PARM_DECL
4636 || TREE_CODE (exp
) == VAR_DECL
4637 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
4638 || TREE_CODE (exp
) == COMPONENT_REF
4639 || TREE_CODE (exp
) == ARRAY_REF
)
/* NOTE(review): the function header and most case labels were lost in
   extraction; remaining tokens preserved verbatim.  */
4644 /* Subroutine of expand_expr: return rtx if EXP is a
4645 variable or parameter; else return 0. */
4652 switch (TREE_CODE (exp
))
4656 return DECL_RTL (exp
);
4662 /* expand_expr: generate code for computing expression EXP.
4663 An rtx for the computed value is returned. The value is never null.
4664 In the case of a void EXP, const0_rtx is returned.
4666 The value may be stored in TARGET if TARGET is nonzero.
4667 TARGET is just a suggestion; callers must assume that
4668 the rtx returned may not be the same as TARGET.
4670 If TARGET is CONST0_RTX, it means that the value will be ignored.
4672 If TMODE is not VOIDmode, it suggests generating the
4673 result in mode TMODE. But this is done only when convenient.
4674 Otherwise, TMODE is ignored and the value generated in its natural mode.
4675 TMODE is just a suggestion; callers must assume that
4676 the rtx returned may not have mode TMODE.
4678 Note that TARGET may have neither TMODE nor MODE. In that case, it
4679 probably will not be used.
4681 If MODIFIER is EXPAND_SUM then when EXP is an addition
4682 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4683 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4684 products as above, or REG or MEM, or constant.
4685 Ordinarily in such cases we would output mul or add instructions
4686 and then return a pseudo reg containing the sum.
4688 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4689 it also marks a label as absolutely required (it can't be dead).
4690 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4691 This is used for outputting expressions used in initializers.
4693 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4694 with a constant address even if that address is not normally legitimate.
4695 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4698 expand_expr (exp
, target
, tmode
, modifier
)
4701 enum machine_mode tmode
;
4702 enum expand_modifier modifier
;
4704 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4705 This is static so it will be accessible to our recursive callees. */
4706 static tree placeholder_list
= 0;
4707 register rtx op0
, op1
, temp
;
4708 tree type
= TREE_TYPE (exp
);
4709 int unsignedp
= TREE_UNSIGNED (type
);
4710 register enum machine_mode mode
= TYPE_MODE (type
);
4711 register enum tree_code code
= TREE_CODE (exp
);
4713 /* Use subtarget as the target for operand 0 of a binary operation. */
4714 rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
4715 rtx original_target
= target
;
4716 /* Maybe defer this until sure not doing bytecode? */
4717 int ignore
= (target
== const0_rtx
4718 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
4719 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
4720 || code
== COND_EXPR
)
4721 && TREE_CODE (type
) == VOID_TYPE
));
4725 if (output_bytecode
&& modifier
!= EXPAND_INITIALIZER
)
4727 bc_expand_expr (exp
);
4731 /* Don't use hard regs as subtargets, because the combiner
4732 can only handle pseudo regs. */
4733 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
4735 /* Avoid subtargets inside loops,
4736 since they hide some invariant expressions. */
4737 if (preserve_subexpressions_p ())
4740 /* If we are going to ignore this result, we need only do something
4741 if there is a side-effect somewhere in the expression. If there
4742 is, short-circuit the most common cases here. Note that we must
4743 not call expand_expr with anything but const0_rtx in case this
4744 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4748 if (! TREE_SIDE_EFFECTS (exp
))
4751 /* Ensure we reference a volatile object even if value is ignored. */
4752 if (TREE_THIS_VOLATILE (exp
)
4753 && TREE_CODE (exp
) != FUNCTION_DECL
4754 && mode
!= VOIDmode
&& mode
!= BLKmode
)
4756 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
4757 if (GET_CODE (temp
) == MEM
)
4758 temp
= copy_to_reg (temp
);
4762 if (TREE_CODE_CLASS (code
) == '1')
4763 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4764 VOIDmode
, modifier
);
4765 else if (TREE_CODE_CLASS (code
) == '2'
4766 || TREE_CODE_CLASS (code
) == '<')
4768 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
4769 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
4772 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
4773 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
4774 /* If the second operand has no side effects, just evaluate
4776 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4777 VOIDmode
, modifier
);
4782 /* If will do cse, generate all results into pseudo registers
4783 since 1) that allows cse to find more things
4784 and 2) otherwise cse could produce an insn the machine
4787 if (! cse_not_expected
&& mode
!= BLKmode
&& target
4788 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4795 tree function
= decl_function_context (exp
);
4796 /* Handle using a label in a containing function. */
4797 if (function
!= current_function_decl
&& function
!= 0)
4799 struct function
*p
= find_function_data (function
);
4800 /* Allocate in the memory associated with the function
4801 that the label is in. */
4802 push_obstacks (p
->function_obstack
,
4803 p
->function_maybepermanent_obstack
);
4805 p
->forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4806 label_rtx (exp
), p
->forced_labels
);
4809 else if (modifier
== EXPAND_INITIALIZER
)
4810 forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4811 label_rtx (exp
), forced_labels
);
4812 temp
= gen_rtx (MEM
, FUNCTION_MODE
,
4813 gen_rtx (LABEL_REF
, Pmode
, label_rtx (exp
)));
4814 if (function
!= current_function_decl
&& function
!= 0)
4815 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
4820 if (DECL_RTL (exp
) == 0)
4822 error_with_decl (exp
, "prior parameter's size depends on `%s'");
4823 return CONST0_RTX (mode
);
4826 /* ... fall through ... */
4829 /* If a static var's type was incomplete when the decl was written,
4830 but the type is complete now, lay out the decl now. */
4831 if (DECL_SIZE (exp
) == 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4832 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
4834 push_obstacks_nochange ();
4835 end_temporary_allocation ();
4836 layout_decl (exp
, 0);
4837 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
4841 /* ... fall through ... */
4845 if (DECL_RTL (exp
) == 0)
4848 /* Ensure variable marked as used even if it doesn't go through
4849 a parser. If it hasn't be used yet, write out an external
4851 if (! TREE_USED (exp
))
4853 assemble_external (exp
);
4854 TREE_USED (exp
) = 1;
4857 /* Show we haven't gotten RTL for this yet. */
4860 /* Handle variables inherited from containing functions. */
4861 context
= decl_function_context (exp
);
4863 /* We treat inline_function_decl as an alias for the current function
4864 because that is the inline function whose vars, types, etc.
4865 are being merged into the current function.
4866 See expand_inline_function. */
4868 if (context
!= 0 && context
!= current_function_decl
4869 && context
!= inline_function_decl
4870 /* If var is static, we don't need a static chain to access it. */
4871 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
4872 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
4876 /* Mark as non-local and addressable. */
4877 DECL_NONLOCAL (exp
) = 1;
4878 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
4880 mark_addressable (exp
);
4881 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
4883 addr
= XEXP (DECL_RTL (exp
), 0);
4884 if (GET_CODE (addr
) == MEM
)
4885 addr
= gen_rtx (MEM
, Pmode
,
4886 fix_lexical_addr (XEXP (addr
, 0), exp
));
4888 addr
= fix_lexical_addr (addr
, exp
);
4889 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
4892 /* This is the case of an array whose size is to be determined
4893 from its initializer, while the initializer is still being parsed.
4896 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4897 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
4898 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
4899 XEXP (DECL_RTL (exp
), 0));
4901 /* If DECL_RTL is memory, we are in the normal case and either
4902 the address is not valid or it is not a register and -fforce-addr
4903 is specified, get the address into a register. */
4905 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4906 && modifier
!= EXPAND_CONST_ADDRESS
4907 && modifier
!= EXPAND_SUM
4908 && modifier
!= EXPAND_INITIALIZER
4909 && (! memory_address_p (DECL_MODE (exp
),
4910 XEXP (DECL_RTL (exp
), 0))
4912 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
4913 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
4914 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
4916 /* If we got something, return it. But first, set the alignment
4917 the address is a register. */
4920 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
4921 mark_reg_pointer (XEXP (temp
, 0),
4922 DECL_ALIGN (exp
) / BITS_PER_UNIT
);
4927 /* If the mode of DECL_RTL does not match that of the decl, it
4928 must be a promoted value. We return a SUBREG of the wanted mode,
4929 but mark it so that we know that it was already extended. */
4931 if (GET_CODE (DECL_RTL (exp
)) == REG
4932 && GET_MODE (DECL_RTL (exp
)) != mode
)
4934 /* Get the signedness used for this variable. Ensure we get the
4935 same mode we got when the variable was declared. */
4936 if (GET_MODE (DECL_RTL (exp
))
4937 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
4940 temp
= gen_rtx (SUBREG
, mode
, DECL_RTL (exp
), 0);
4941 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4942 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4946 return DECL_RTL (exp
);
4949 return immed_double_const (TREE_INT_CST_LOW (exp
),
4950 TREE_INT_CST_HIGH (exp
),
4954 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
4957 /* If optimized, generate immediate CONST_DOUBLE
4958 which will be turned into memory by reload if necessary.
4960 We used to force a register so that loop.c could see it. But
4961 this does not allow gen_* patterns to perform optimizations with
4962 the constants. It also produces two insns in cases like "x = 1.0;".
4963 On most machines, floating-point constants are not permitted in
4964 many insns, so we'd end up copying it to a register in any case.
4966 Now, we do the copying in expand_binop, if appropriate. */
4967 return immed_real_const (exp
);
4971 if (! TREE_CST_RTL (exp
))
4972 output_constant_def (exp
);
4974 /* TREE_CST_RTL probably contains a constant address.
4975 On RISC machines where a constant address isn't valid,
4976 make some insns to get that address into a register. */
4977 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
4978 && modifier
!= EXPAND_CONST_ADDRESS
4979 && modifier
!= EXPAND_INITIALIZER
4980 && modifier
!= EXPAND_SUM
4981 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
4983 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
4984 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
4985 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
4986 return TREE_CST_RTL (exp
);
4989 context
= decl_function_context (exp
);
4991 /* We treat inline_function_decl as an alias for the current function
4992 because that is the inline function whose vars, types, etc.
4993 are being merged into the current function.
4994 See expand_inline_function. */
4995 if (context
== current_function_decl
|| context
== inline_function_decl
)
4998 /* If this is non-local, handle it. */
5001 temp
= SAVE_EXPR_RTL (exp
);
5002 if (temp
&& GET_CODE (temp
) == REG
)
5004 put_var_into_stack (exp
);
5005 temp
= SAVE_EXPR_RTL (exp
);
5007 if (temp
== 0 || GET_CODE (temp
) != MEM
)
5009 return change_address (temp
, mode
,
5010 fix_lexical_addr (XEXP (temp
, 0), exp
));
5012 if (SAVE_EXPR_RTL (exp
) == 0)
5014 if (mode
== VOIDmode
)
5017 temp
= assign_temp (type
, 0, 0, 0);
5019 SAVE_EXPR_RTL (exp
) = temp
;
5020 if (!optimize
&& GET_CODE (temp
) == REG
)
5021 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, temp
,
5024 /* If the mode of TEMP does not match that of the expression, it
5025 must be a promoted value. We pass store_expr a SUBREG of the
5026 wanted mode but mark it so that we know that it was already
5027 extended. Note that `unsignedp' was modified above in
5030 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
5032 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5033 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5034 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5037 if (temp
== const0_rtx
)
5038 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
5040 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
5043 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5044 must be a promoted value. We return a SUBREG of the wanted mode,
5045 but mark it so that we know that it was already extended. */
5047 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
5048 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
5050 /* Compute the signedness and make the proper SUBREG. */
5051 promote_mode (type
, mode
, &unsignedp
, 0);
5052 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5053 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5054 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5058 return SAVE_EXPR_RTL (exp
);
5063 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5064 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
5068 case PLACEHOLDER_EXPR
:
5069 /* If there is an object on the head of the placeholder list,
5070 see if some object in it's references is of type TYPE. For
5071 further information, see tree.def. */
5072 if (placeholder_list
)
5075 tree old_list
= placeholder_list
;
5077 for (object
= TREE_PURPOSE (placeholder_list
);
5078 (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
5079 != TYPE_MAIN_VARIANT (type
))
5080 && (TREE_CODE_CLASS (TREE_CODE (object
)) == 'r'
5081 || TREE_CODE_CLASS (TREE_CODE (object
)) == '1'
5082 || TREE_CODE_CLASS (TREE_CODE (object
)) == '2'
5083 || TREE_CODE_CLASS (TREE_CODE (object
)) == 'e');
5084 object
= TREE_OPERAND (object
, 0))
5088 && (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
5089 == TYPE_MAIN_VARIANT (type
)))
5091 /* Expand this object skipping the list entries before
5092 it was found in case it is also a PLACEHOLDER_EXPR.
5093 In that case, we want to translate it using subsequent
5095 placeholder_list
= TREE_CHAIN (placeholder_list
);
5096 temp
= expand_expr (object
, original_target
, tmode
, modifier
);
5097 placeholder_list
= old_list
;
5102 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5105 case WITH_RECORD_EXPR
:
5106 /* Put the object on the placeholder list, expand our first operand,
5107 and pop the list. */
5108 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
5110 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
5112 placeholder_list
= TREE_CHAIN (placeholder_list
);
5116 expand_exit_loop_if_false (NULL_PTR
,
5117 invert_truthvalue (TREE_OPERAND (exp
, 0)));
5122 expand_start_loop (1);
5123 expand_expr_stmt (TREE_OPERAND (exp
, 0));
5131 tree vars
= TREE_OPERAND (exp
, 0);
5132 int vars_need_expansion
= 0;
5134 /* Need to open a binding contour here because
5135 if there are any cleanups they most be contained here. */
5136 expand_start_bindings (0);
5138 /* Mark the corresponding BLOCK for output in its proper place. */
5139 if (TREE_OPERAND (exp
, 2) != 0
5140 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
5141 insert_block (TREE_OPERAND (exp
, 2));
5143 /* If VARS have not yet been expanded, expand them now. */
5146 if (DECL_RTL (vars
) == 0)
5148 vars_need_expansion
= 1;
5151 expand_decl_init (vars
);
5152 vars
= TREE_CHAIN (vars
);
5155 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
5157 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
5163 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
5165 emit_insns (RTL_EXPR_SEQUENCE (exp
));
5166 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
5167 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
5168 free_temps_for_rtl_expr (exp
);
5169 return RTL_EXPR_RTL (exp
);
5172 /* If we don't need the result, just ensure we evaluate any
5177 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
5178 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
5182 /* All elts simple constants => refer to a constant in memory. But
5183 if this is a non-BLKmode mode, let it store a field at a time
5184 since that should make a CONST_INT or CONST_DOUBLE when we
5185 fold. Likewise, if we have a target we can use, it is best to
5186 store directly into the target unless the type is large enough
5187 that memcpy will be used. If we are making an initializer and
5188 all operands are constant, put it in memory as well. */
5189 else if ((TREE_STATIC (exp
)
5190 && ((mode
== BLKmode
5191 && ! (target
!= 0 && safe_from_p (target
, exp
)))
5192 || TREE_ADDRESSABLE (exp
)
5193 || (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5194 && (move_by_pieces_ninsns
5195 (TREE_INT_CST_LOW (TYPE_SIZE (type
))/BITS_PER_UNIT
,
5196 TYPE_ALIGN (type
) / BITS_PER_UNIT
)
5198 && ! mostly_zeros_p (exp
))))
5199 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
5201 rtx constructor
= output_constant_def (exp
);
5202 if (modifier
!= EXPAND_CONST_ADDRESS
5203 && modifier
!= EXPAND_INITIALIZER
5204 && modifier
!= EXPAND_SUM
5205 && (! memory_address_p (GET_MODE (constructor
),
5206 XEXP (constructor
, 0))
5208 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
5209 constructor
= change_address (constructor
, VOIDmode
,
5210 XEXP (constructor
, 0));
5216 if (target
== 0 || ! safe_from_p (target
, exp
))
5218 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
5219 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5221 target
= assign_temp (type
, 0, 1, 1);
5224 if (TREE_READONLY (exp
))
5226 if (GET_CODE (target
) == MEM
)
5227 target
= change_address (target
, GET_MODE (target
),
5229 RTX_UNCHANGING_P (target
) = 1;
5232 store_constructor (exp
, target
, 0);
5238 tree exp1
= TREE_OPERAND (exp
, 0);
5241 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
5242 op0
= memory_address (mode
, op0
);
5244 temp
= gen_rtx (MEM
, mode
, op0
);
5245 /* If address was computed by addition,
5246 mark this as an element of an aggregate. */
5247 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5248 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
5249 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == PLUS_EXPR
)
5250 || AGGREGATE_TYPE_P (TREE_TYPE (exp
))
5251 || (TREE_CODE (exp1
) == ADDR_EXPR
5252 && (exp2
= TREE_OPERAND (exp1
, 0))
5253 && AGGREGATE_TYPE_P (TREE_TYPE (exp2
))))
5254 MEM_IN_STRUCT_P (temp
) = 1;
5255 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) | flag_volatile
;
5257 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5258 here, because, in C and C++, the fact that a location is accessed
5259 through a pointer to const does not mean that the value there can
5260 never change. Languages where it can never change should
5261 also set TREE_STATIC. */
5262 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
5267 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
5271 tree array
= TREE_OPERAND (exp
, 0);
5272 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5273 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
5274 tree index
= TREE_OPERAND (exp
, 1);
5275 tree index_type
= TREE_TYPE (index
);
5278 if (TREE_CODE (low_bound
) != INTEGER_CST
5279 && contains_placeholder_p (low_bound
))
5280 low_bound
= build (WITH_RECORD_EXPR
, sizetype
, low_bound
, exp
);
5282 /* Optimize the special-case of a zero lower bound.
5284 We convert the low_bound to sizetype to avoid some problems
5285 with constant folding. (E.g. suppose the lower bound is 1,
5286 and its mode is QI. Without the conversion, (ARRAY
5287 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5288 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5290 But sizetype isn't quite right either (especially if
5291 the lowbound is negative). FIXME */
5293 if (! integer_zerop (low_bound
))
5294 index
= fold (build (MINUS_EXPR
, index_type
, index
,
5295 convert (sizetype
, low_bound
)));
5297 if ((TREE_CODE (index
) != INTEGER_CST
5298 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5299 && (! SLOW_UNALIGNED_ACCESS
|| ! get_inner_unaligned_p (exp
)))
5301 /* Nonconstant array index or nonconstant element size, and
5302 not an array in an unaligned (packed) structure field.
5303 Generate the tree for *(&array+index) and expand that,
5304 except do it in a language-independent way
5305 and don't complain about non-lvalue arrays.
5306 `mark_addressable' should already have been called
5307 for any array for which this case will be reached. */
5309 /* Don't forget the const or volatile flag from the array
5311 tree variant_type
= build_type_variant (type
,
5312 TREE_READONLY (exp
),
5313 TREE_THIS_VOLATILE (exp
));
5314 tree array_adr
= build1 (ADDR_EXPR
,
5315 build_pointer_type (variant_type
), array
);
5317 tree size
= size_in_bytes (type
);
5319 /* Convert the integer argument to a type the same size as sizetype
5320 so the multiply won't overflow spuriously. */
5321 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
5322 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
5325 if (TREE_CODE (size
) != INTEGER_CST
5326 && contains_placeholder_p (size
))
5327 size
= build (WITH_RECORD_EXPR
, sizetype
, size
, exp
);
5329 /* Don't think the address has side effects
5330 just because the array does.
5331 (In some cases the address might have side effects,
5332 and we fail to record that fact here. However, it should not
5333 matter, since expand_expr should not care.) */
5334 TREE_SIDE_EFFECTS (array_adr
) = 0;
5338 (INDIRECT_REF
, type
,
5339 fold (build (PLUS_EXPR
,
5340 TYPE_POINTER_TO (variant_type
),
5345 TYPE_POINTER_TO (variant_type
),
5346 fold (build (MULT_EXPR
, TREE_TYPE (index
),
5348 convert (TREE_TYPE (index
),
5351 /* Volatility, etc., of new expression is same as old
5353 TREE_SIDE_EFFECTS (elt
) = TREE_SIDE_EFFECTS (exp
);
5354 TREE_THIS_VOLATILE (elt
) = TREE_THIS_VOLATILE (exp
);
5355 TREE_READONLY (elt
) = TREE_READONLY (exp
);
5357 return expand_expr (elt
, target
, tmode
, modifier
);
5360 /* Fold an expression like: "foo"[2].
5361 This is not done in fold so it won't happen inside &.
5362 Don't fold if this is for wide characters since it's too
5363 difficult to do correctly and this is a very rare case. */
5365 if (TREE_CODE (array
) == STRING_CST
5366 && TREE_CODE (index
) == INTEGER_CST
5367 && !TREE_INT_CST_HIGH (index
)
5368 && (i
= TREE_INT_CST_LOW (index
)) < TREE_STRING_LENGTH (array
)
5369 && GET_MODE_CLASS (mode
) == MODE_INT
5370 && GET_MODE_SIZE (mode
) == 1)
5371 return GEN_INT (TREE_STRING_POINTER (array
)[i
]);
5373 /* If this is a constant index into a constant array,
5374 just get the value from the array. Handle both the cases when
5375 we have an explicit constructor and when our operand is a variable
5376 that was declared const. */
5378 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
5380 if (TREE_CODE (index
) == INTEGER_CST
5381 && TREE_INT_CST_HIGH (index
) == 0)
5383 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
5385 i
= TREE_INT_CST_LOW (index
);
5387 elem
= TREE_CHAIN (elem
);
5389 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5394 else if (optimize
>= 1
5395 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
5396 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
5397 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
5399 if (TREE_CODE (index
) == INTEGER_CST
5400 && TREE_INT_CST_HIGH (index
) == 0)
5402 tree init
= DECL_INITIAL (array
);
5404 i
= TREE_INT_CST_LOW (index
);
5405 if (TREE_CODE (init
) == CONSTRUCTOR
)
5407 tree elem
= CONSTRUCTOR_ELTS (init
);
5410 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
))
5411 elem
= TREE_CHAIN (elem
);
5413 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5416 else if (TREE_CODE (init
) == STRING_CST
5417 && i
< TREE_STRING_LENGTH (init
))
5418 return GEN_INT (TREE_STRING_POINTER (init
)[i
]);
5423 /* Treat array-ref with constant index as a component-ref. */
5427 /* If the operand is a CONSTRUCTOR, we can just extract the
5428 appropriate field if it is present. Don't do this if we have
5429 already written the data since we want to refer to that copy
5430 and varasm.c assumes that's what we'll do. */
5431 if (code
!= ARRAY_REF
5432 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
5433 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
5437 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
5438 elt
= TREE_CHAIN (elt
))
5439 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
5440 return expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
5444 enum machine_mode mode1
;
5449 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
5450 &mode1
, &unsignedp
, &volatilep
);
5453 /* If we got back the original object, something is wrong. Perhaps
5454 we are evaluating an expression too early. In any event, don't
5455 infinitely recurse. */
5459 /* If TEM's type is a union of variable size, pass TARGET to the inner
5460 computation, since it will need a temporary and TARGET is known
5461 to have to do. This occurs in unchecked conversion in Ada. */
5463 op0
= expand_expr (tem
,
5464 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
5465 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
5467 ? target
: NULL_RTX
),
5469 modifier
== EXPAND_INITIALIZER
? modifier
: 0);
5471 /* If this is a constant, put it into a register if it is a
5472 legitimate constant and memory if it isn't. */
5473 if (CONSTANT_P (op0
))
5475 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
5476 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
))
5477 op0
= force_reg (mode
, op0
);
5479 op0
= validize_mem (force_const_mem (mode
, op0
));
5482 alignment
= TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
;
5485 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
5487 if (GET_CODE (op0
) != MEM
)
5489 op0
= change_address (op0
, VOIDmode
,
5490 gen_rtx (PLUS
, ptr_mode
, XEXP (op0
, 0),
5491 force_reg (ptr_mode
, offset_rtx
)));
5492 /* If we have a variable offset, the known alignment
5493 is only that of the innermost structure containing the field.
5494 (Actually, we could sometimes do better by using the
5495 size of an element of the innermost array, but no need.) */
5496 if (TREE_CODE (exp
) == COMPONENT_REF
5497 || TREE_CODE (exp
) == BIT_FIELD_REF
)
5498 alignment
= (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5502 /* Don't forget about volatility even if this is a bitfield. */
5503 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
5505 op0
= copy_rtx (op0
);
5506 MEM_VOLATILE_P (op0
) = 1;
5509 /* In cases where an aligned union has an unaligned object
5510 as a field, we might be extracting a BLKmode value from
5511 an integer-mode (e.g., SImode) object. Handle this case
5512 by doing the extract into an object as wide as the field
5513 (which we know to be the width of a basic mode), then
5514 storing into memory, and changing the mode to BLKmode.
5515 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5516 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5517 if (mode1
== VOIDmode
5518 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
5519 || (modifier
!= EXPAND_CONST_ADDRESS
5520 && modifier
!= EXPAND_INITIALIZER
5521 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
])
5522 /* If the field isn't aligned enough to fetch as a memref,
5523 fetch it as a bit field. */
5524 || (SLOW_UNALIGNED_ACCESS
5525 && ((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
))
5526 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))))))
5528 enum machine_mode ext_mode
= mode
;
5530 if (ext_mode
== BLKmode
)
5531 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
5533 if (ext_mode
== BLKmode
)
5535 /* In this case, BITPOS must start at a byte boundary and
5536 TARGET, if specified, must be a MEM. */
5537 if (GET_CODE (op0
) != MEM
5538 || (target
!= 0 && GET_CODE (target
) != MEM
)
5539 || bitpos
% BITS_PER_UNIT
!= 0)
5542 op0
= change_address (op0
, VOIDmode
,
5543 plus_constant (XEXP (op0
, 0),
5544 bitpos
/ BITS_PER_UNIT
));
5546 target
= assign_temp (type
, 0, 1, 1);
5548 emit_block_move (target
, op0
,
5549 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5556 op0
= validize_mem (op0
);
5558 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
5559 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5561 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
5562 unsignedp
, target
, ext_mode
, ext_mode
,
5564 int_size_in_bytes (TREE_TYPE (tem
)));
5566 /* If the result is a record type and BITSIZE is narrower than
5567 the mode of OP0, an integral mode, and this is a big endian
5568 machine, we must put the field into the high-order bits. */
5569 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
5570 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
5571 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
5572 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
5573 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
5577 if (mode
== BLKmode
)
5579 rtx
new = assign_stack_temp (ext_mode
,
5580 bitsize
/ BITS_PER_UNIT
, 0);
5582 emit_move_insn (new, op0
);
5583 op0
= copy_rtx (new);
5584 PUT_MODE (op0
, BLKmode
);
5585 MEM_IN_STRUCT_P (op0
) = 1;
5591 /* If the result is BLKmode, use that to access the object
5593 if (mode
== BLKmode
)
5596 /* Get a reference to just this component. */
5597 if (modifier
== EXPAND_CONST_ADDRESS
5598 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
5599 op0
= gen_rtx (MEM
, mode1
, plus_constant (XEXP (op0
, 0),
5600 (bitpos
/ BITS_PER_UNIT
)));
5602 op0
= change_address (op0
, mode1
,
5603 plus_constant (XEXP (op0
, 0),
5604 (bitpos
/ BITS_PER_UNIT
)));
5605 if (GET_CODE (XEXP (op0
, 0)) == REG
)
5606 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5608 MEM_IN_STRUCT_P (op0
) = 1;
5609 MEM_VOLATILE_P (op0
) |= volatilep
;
5610 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
)
5613 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5614 convert_move (target
, op0
, unsignedp
);
5618 /* Intended for a reference to a buffer of a file-object in Pascal.
5619 But it's not certain that a special tree code will really be
5620 necessary for these. INDIRECT_REF might work for them. */
5626 /* Pascal set IN expression.
5629 rlo = set_low - (set_low%bits_per_word);
5630 the_word = set [ (index - rlo)/bits_per_word ];
5631 bit_index = index % bits_per_word;
5632 bitmask = 1 << bit_index;
5633 return !!(the_word & bitmask); */
5635 tree set
= TREE_OPERAND (exp
, 0);
5636 tree index
= TREE_OPERAND (exp
, 1);
5637 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
5638 tree set_type
= TREE_TYPE (set
);
5639 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
5640 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
5641 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
5642 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
5643 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
5644 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
5645 rtx setaddr
= XEXP (setval
, 0);
5646 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
5648 rtx diff
, quo
, rem
, addr
, bit
, result
;
5650 preexpand_calls (exp
);
5652 /* If domain is empty, answer is no. Likewise if index is constant
5653 and out of bounds. */
5654 if ((TREE_CODE (set_high_bound
) == INTEGER_CST
5655 && TREE_CODE (set_low_bound
) == INTEGER_CST
5656 && tree_int_cst_lt (set_high_bound
, set_low_bound
)
5657 || (TREE_CODE (index
) == INTEGER_CST
5658 && TREE_CODE (set_low_bound
) == INTEGER_CST
5659 && tree_int_cst_lt (index
, set_low_bound
))
5660 || (TREE_CODE (set_high_bound
) == INTEGER_CST
5661 && TREE_CODE (index
) == INTEGER_CST
5662 && tree_int_cst_lt (set_high_bound
, index
))))
5666 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5668 /* If we get here, we have to generate the code for both cases
5669 (in range and out of range). */
5671 op0
= gen_label_rtx ();
5672 op1
= gen_label_rtx ();
5674 if (! (GET_CODE (index_val
) == CONST_INT
5675 && GET_CODE (lo_r
) == CONST_INT
))
5677 emit_cmp_insn (index_val
, lo_r
, LT
, NULL_RTX
,
5678 GET_MODE (index_val
), iunsignedp
, 0);
5679 emit_jump_insn (gen_blt (op1
));
5682 if (! (GET_CODE (index_val
) == CONST_INT
5683 && GET_CODE (hi_r
) == CONST_INT
))
5685 emit_cmp_insn (index_val
, hi_r
, GT
, NULL_RTX
,
5686 GET_MODE (index_val
), iunsignedp
, 0);
5687 emit_jump_insn (gen_bgt (op1
));
5690 /* Calculate the element number of bit zero in the first word
5692 if (GET_CODE (lo_r
) == CONST_INT
)
5693 rlow
= GEN_INT (INTVAL (lo_r
)
5694 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
5696 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
5697 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
5698 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5700 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
5701 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5703 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
5704 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5705 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
5706 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5708 addr
= memory_address (byte_mode
,
5709 expand_binop (index_mode
, add_optab
, diff
,
5710 setaddr
, NULL_RTX
, iunsignedp
,
5713 /* Extract the bit we want to examine */
5714 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
5715 gen_rtx (MEM
, byte_mode
, addr
),
5716 make_tree (TREE_TYPE (index
), rem
),
5718 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
5719 GET_MODE (target
) == byte_mode
? target
: 0,
5720 1, OPTAB_LIB_WIDEN
);
5722 if (result
!= target
)
5723 convert_move (target
, result
, 1);
5725 /* Output the code to handle the out-of-range case. */
5728 emit_move_insn (target
, const0_rtx
);
5733 case WITH_CLEANUP_EXPR
:
5734 if (RTL_EXPR_RTL (exp
) == 0)
5737 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5739 = tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2), cleanups_this_call
);
5740 /* That's it for this cleanup. */
5741 TREE_OPERAND (exp
, 2) = 0;
5742 expand_eh_region_start ();
5744 return RTL_EXPR_RTL (exp
);
5746 case CLEANUP_POINT_EXPR
:
5748 extern int temp_slot_level
;
5749 tree old_cleanups
= cleanups_this_call
;
5750 int old_temp_level
= target_temp_slot_level
;
5752 target_temp_slot_level
= temp_slot_level
;
5753 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5754 /* If we're going to use this value, load it up now. */
5756 op0
= force_not_mem (op0
);
5757 expand_cleanups_to (old_cleanups
);
5758 preserve_temp_slots (op0
);
5761 target_temp_slot_level
= old_temp_level
;
5766 /* Check for a built-in function. */
5767 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5768 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5770 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5771 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
5773 /* If this call was expanded already by preexpand_calls,
5774 just return the result we got. */
5775 if (CALL_EXPR_RTL (exp
) != 0)
5776 return CALL_EXPR_RTL (exp
);
5778 return expand_call (exp
, target
, ignore
);
5780 case NON_LVALUE_EXPR
:
5783 case REFERENCE_EXPR
:
5784 if (TREE_CODE (type
) == UNION_TYPE
)
5786 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
5789 if (mode
!= BLKmode
)
5790 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5792 target
= assign_temp (type
, 0, 1, 1);
5795 if (GET_CODE (target
) == MEM
)
5796 /* Store data into beginning of memory target. */
5797 store_expr (TREE_OPERAND (exp
, 0),
5798 change_address (target
, TYPE_MODE (valtype
), 0), 0);
5800 else if (GET_CODE (target
) == REG
)
5801 /* Store this field into a union of the proper type. */
5802 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
5803 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
5805 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5809 /* Return the entire union. */
5813 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5815 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
5818 /* If the signedness of the conversion differs and OP0 is
5819 a promoted SUBREG, clear that indication since we now
5820 have to do the proper extension. */
5821 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
5822 && GET_CODE (op0
) == SUBREG
)
5823 SUBREG_PROMOTED_VAR_P (op0
) = 0;
5828 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
5829 if (GET_MODE (op0
) == mode
)
5832 /* If OP0 is a constant, just convert it into the proper mode. */
5833 if (CONSTANT_P (op0
))
5835 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
5836 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5838 if (modifier
== EXPAND_INITIALIZER
)
5839 return gen_rtx (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
5843 convert_to_mode (mode
, op0
,
5844 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5846 convert_move (target
, op0
,
5847 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5851 /* We come here from MINUS_EXPR when the second operand is a
5854 this_optab
= add_optab
;
5856 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5857 something else, make sure we add the register to the constant and
5858 then to the other thing. This case can occur during strength
5859 reduction and doing it this way will produce better code if the
5860 frame pointer or argument pointer is eliminated.
5862 fold-const.c will ensure that the constant is always in the inner
5863 PLUS_EXPR, so the only case we need to do anything about is if
5864 sp, ap, or fp is our second argument, in which case we must swap
5865 the innermost first argument and our second argument. */
5867 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5868 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
5869 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
5870 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
5871 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
5872 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
5874 tree t
= TREE_OPERAND (exp
, 1);
5876 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5877 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
5880 /* If the result is to be ptr_mode and we are adding an integer to
5881 something, we might be forming a constant. So try to use
5882 plus_constant. If it produces a sum and we can't accept it,
5883 use force_operand. This allows P = &ARR[const] to generate
5884 efficient code on machines where a SYMBOL_REF is not a valid
5887 If this is an EXPAND_SUM call, always return the sum. */
5888 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
5889 || mode
== ptr_mode
)
5891 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
5892 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5893 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
5895 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
5897 op1
= plus_constant (op1
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)));
5898 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5899 op1
= force_operand (op1
, target
);
5903 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
5904 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
5905 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
5907 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
5909 if (! CONSTANT_P (op0
))
5911 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5912 VOIDmode
, modifier
);
5913 /* Don't go to both_summands if modifier
5914 says it's not right to return a PLUS. */
5915 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5919 op0
= plus_constant (op0
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)));
5920 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5921 op0
= force_operand (op0
, target
);
5926 /* No sense saving up arithmetic to be done
5927 if it's all in the wrong mode to form part of an address.
5928 And force_operand won't know whether to sign-extend or
5930 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5931 || mode
!= ptr_mode
)
5934 preexpand_calls (exp
);
5935 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5938 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
5939 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
5942 /* Make sure any term that's a sum with a constant comes last. */
5943 if (GET_CODE (op0
) == PLUS
5944 && CONSTANT_P (XEXP (op0
, 1)))
5950 /* If adding to a sum including a constant,
5951 associate it to put the constant outside. */
5952 if (GET_CODE (op1
) == PLUS
5953 && CONSTANT_P (XEXP (op1
, 1)))
5955 rtx constant_term
= const0_rtx
;
5957 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
5960 /* Ensure that MULT comes first if there is one. */
5961 else if (GET_CODE (op0
) == MULT
)
5962 op0
= gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
5964 op0
= gen_rtx (PLUS
, mode
, XEXP (op1
, 0), op0
);
5966 /* Let's also eliminate constants from op0 if possible. */
5967 op0
= eliminate_constant_term (op0
, &constant_term
);
5969 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5970 their sum should be a constant. Form it into OP1, since the
5971 result we want will then be OP0 + OP1. */
5973 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
5978 op1
= gen_rtx (PLUS
, mode
, constant_term
, XEXP (op1
, 1));
5981 /* Put a constant term last and put a multiplication first. */
5982 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
5983 temp
= op1
, op1
= op0
, op0
= temp
;
5985 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
5986 return temp
? temp
: gen_rtx (PLUS
, mode
, op0
, op1
);
5989 /* For initializers, we are allowed to return a MINUS of two
5990 symbolic constants. Here we handle all cases when both operands
5992 /* Handle difference of two symbolic constants,
5993 for the sake of an initializer. */
5994 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
5995 && really_constant_p (TREE_OPERAND (exp
, 0))
5996 && really_constant_p (TREE_OPERAND (exp
, 1)))
5998 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
5999 VOIDmode
, modifier
);
6000 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6001 VOIDmode
, modifier
);
6003 /* If the last operand is a CONST_INT, use plus_constant of
6004 the negated constant. Else make the MINUS. */
6005 if (GET_CODE (op1
) == CONST_INT
)
6006 return plus_constant (op0
, - INTVAL (op1
));
6008 return gen_rtx (MINUS
, mode
, op0
, op1
);
6010 /* Convert A - const to A + (-const). */
6011 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6013 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
6014 TREE_OPERAND (exp
, 1)));
6016 /* Deal with the case where we can't negate the constant
6018 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
6020 tree newtype
= signed_type (type
);
6021 tree newop0
= convert (newtype
, TREE_OPERAND (exp
, 0));
6022 tree newop1
= convert (newtype
, TREE_OPERAND (exp
, 1));
6023 tree newneg
= fold (build1 (NEGATE_EXPR
, newtype
, newop1
));
6025 if (! TREE_OVERFLOW (newneg
))
6026 return expand_expr (convert (type
,
6027 build (PLUS_EXPR
, newtype
,
6029 target
, tmode
, modifier
);
6033 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
6037 this_optab
= sub_optab
;
6041 preexpand_calls (exp
);
6042 /* If first operand is constant, swap them.
6043 Thus the following special case checks need only
6044 check the second operand. */
6045 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
6047 register tree t1
= TREE_OPERAND (exp
, 0);
6048 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
6049 TREE_OPERAND (exp
, 1) = t1
;
6052 /* Attempt to return something suitable for generating an
6053 indexed address, for machines that support that. */
6055 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
6056 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6057 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
6059 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, EXPAND_SUM
);
6061 /* Apply distributive law if OP0 is x+c. */
6062 if (GET_CODE (op0
) == PLUS
6063 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
6064 return gen_rtx (PLUS
, mode
,
6065 gen_rtx (MULT
, mode
, XEXP (op0
, 0),
6066 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
6067 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
6068 * INTVAL (XEXP (op0
, 1))));
6070 if (GET_CODE (op0
) != REG
)
6071 op0
= force_operand (op0
, NULL_RTX
);
6072 if (GET_CODE (op0
) != REG
)
6073 op0
= copy_to_mode_reg (mode
, op0
);
6075 return gen_rtx (MULT
, mode
, op0
,
6076 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
6079 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6082 /* Check for multiplying things that have been extended
6083 from a narrower type. If this machine supports multiplying
6084 in that narrower type with a result in the desired type,
6085 do it that way, and avoid the explicit type-conversion. */
6086 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
6087 && TREE_CODE (type
) == INTEGER_TYPE
6088 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6089 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6090 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6091 && int_fits_type_p (TREE_OPERAND (exp
, 1),
6092 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6093 /* Don't use a widening multiply if a shift will do. */
6094 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
6095 > HOST_BITS_PER_WIDE_INT
)
6096 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
6098 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
6099 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6101 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
6102 /* If both operands are extended, they must either both
6103 be zero-extended or both be sign-extended. */
6104 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6106 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
6108 enum machine_mode innermode
6109 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
6110 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6111 ? smul_widen_optab
: umul_widen_optab
);
6112 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6113 ? umul_widen_optab
: smul_widen_optab
);
6114 if (mode
== GET_MODE_WIDER_MODE (innermode
))
6116 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
6118 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6119 NULL_RTX
, VOIDmode
, 0);
6120 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6121 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6124 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6125 NULL_RTX
, VOIDmode
, 0);
6128 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
6129 && innermode
== word_mode
)
6132 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6133 NULL_RTX
, VOIDmode
, 0);
6134 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6135 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6138 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6139 NULL_RTX
, VOIDmode
, 0);
6140 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
6141 unsignedp
, OPTAB_LIB_WIDEN
);
6142 htem
= expand_mult_highpart_adjust (innermode
,
6143 gen_highpart (innermode
, temp
),
6145 gen_highpart (innermode
, temp
),
6147 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
6152 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6153 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6154 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
6156 case TRUNC_DIV_EXPR
:
6157 case FLOOR_DIV_EXPR
:
6159 case ROUND_DIV_EXPR
:
6160 case EXACT_DIV_EXPR
:
6161 preexpand_calls (exp
);
6162 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6164 /* Possible optimization: compute the dividend with EXPAND_SUM
6165 then if the divisor is constant can optimize the case
6166 where some terms of the dividend have coeffs divisible by it. */
6167 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6168 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6169 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
6172 this_optab
= flodiv_optab
;
6175 case TRUNC_MOD_EXPR
:
6176 case FLOOR_MOD_EXPR
:
6178 case ROUND_MOD_EXPR
:
6179 preexpand_calls (exp
);
6180 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6182 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6183 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6184 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
6186 case FIX_ROUND_EXPR
:
6187 case FIX_FLOOR_EXPR
:
6189 abort (); /* Not used for C. */
6191 case FIX_TRUNC_EXPR
:
6192 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6194 target
= gen_reg_rtx (mode
);
6195 expand_fix (target
, op0
, unsignedp
);
6199 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6201 target
= gen_reg_rtx (mode
);
6202 /* expand_float can't figure out what to do if FROM has VOIDmode.
6203 So give it the correct mode. With -O, cse will optimize this. */
6204 if (GET_MODE (op0
) == VOIDmode
)
6205 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6207 expand_float (target
, op0
,
6208 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
6212 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6213 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
6219 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6221 /* Handle complex values specially. */
6222 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
6223 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
6224 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
6226 /* Unsigned abs is simply the operand. Testing here means we don't
6227 risk generating incorrect code below. */
6228 if (TREE_UNSIGNED (type
))
6231 return expand_abs (mode
, op0
, target
, unsignedp
,
6232 safe_from_p (target
, TREE_OPERAND (exp
, 0)));
6236 target
= original_target
;
6237 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1))
6238 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
6239 || GET_MODE (target
) != mode
6240 || (GET_CODE (target
) == REG
6241 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
6242 target
= gen_reg_rtx (mode
);
6243 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6244 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6246 /* First try to do it with a special MIN or MAX instruction.
6247 If that does not win, use a conditional jump to select the proper
6249 this_optab
= (TREE_UNSIGNED (type
)
6250 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
6251 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
6253 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
6258 /* At this point, a MEM target is no longer useful; we will get better
6261 if (GET_CODE (target
) == MEM
)
6262 target
= gen_reg_rtx (mode
);
6265 emit_move_insn (target
, op0
);
6267 op0
= gen_label_rtx ();
6269 /* If this mode is an integer too wide to compare properly,
6270 compare word by word. Rely on cse to optimize constant cases. */
6271 if (GET_MODE_CLASS (mode
) == MODE_INT
&& !can_compare_p (mode
))
6273 if (code
== MAX_EXPR
)
6274 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6275 target
, op1
, NULL_RTX
, op0
);
6277 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6278 op1
, target
, NULL_RTX
, op0
);
6279 emit_move_insn (target
, op1
);
6283 if (code
== MAX_EXPR
)
6284 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6285 ? compare_from_rtx (target
, op1
, GEU
, 1, mode
, NULL_RTX
, 0)
6286 : compare_from_rtx (target
, op1
, GE
, 0, mode
, NULL_RTX
, 0));
6288 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6289 ? compare_from_rtx (target
, op1
, LEU
, 1, mode
, NULL_RTX
, 0)
6290 : compare_from_rtx (target
, op1
, LE
, 0, mode
, NULL_RTX
, 0));
6291 if (temp
== const0_rtx
)
6292 emit_move_insn (target
, op1
);
6293 else if (temp
!= const_true_rtx
)
6295 if (bcc_gen_fctn
[(int) GET_CODE (temp
)] != 0)
6296 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (temp
)]) (op0
));
6299 emit_move_insn (target
, op1
);
6306 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6307 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
6313 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6314 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
6319 /* ??? Can optimize bitwise operations with one arg constant.
6320 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6321 and (a bitwise1 b) bitwise2 b (etc)
6322 but that is probably not worth while. */
6324 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6325 boolean values when we want in all cases to compute both of them. In
6326 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6327 as actual zero-or-1 values and then bitwise anding. In cases where
6328 there cannot be any side effects, better code would be made by
6329 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6330 how to recognize those cases. */
6332 case TRUTH_AND_EXPR
:
6334 this_optab
= and_optab
;
6339 this_optab
= ior_optab
;
6342 case TRUTH_XOR_EXPR
:
6344 this_optab
= xor_optab
;
6351 preexpand_calls (exp
);
6352 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6354 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6355 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
6358 /* Could determine the answer when only additive constants differ. Also,
6359 the addition of one can be handled by changing the condition. */
6366 preexpand_calls (exp
);
6367 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
6371 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6372 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
6374 && GET_CODE (original_target
) == REG
6375 && (GET_MODE (original_target
)
6376 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6378 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6381 if (temp
!= original_target
)
6382 temp
= copy_to_reg (temp
);
6384 op1
= gen_label_rtx ();
6385 emit_cmp_insn (temp
, const0_rtx
, EQ
, NULL_RTX
,
6386 GET_MODE (temp
), unsignedp
, 0);
6387 emit_jump_insn (gen_beq (op1
));
6388 emit_move_insn (temp
, const1_rtx
);
6393 /* If no set-flag instruction, must generate a conditional
6394 store into a temporary variable. Drop through
6395 and handle this like && and ||. */
6397 case TRUTH_ANDIF_EXPR
:
6398 case TRUTH_ORIF_EXPR
:
6400 && (target
== 0 || ! safe_from_p (target
, exp
)
6401 /* Make sure we don't have a hard reg (such as function's return
6402 value) live across basic blocks, if not optimizing. */
6403 || (!optimize
&& GET_CODE (target
) == REG
6404 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
6405 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6408 emit_clr_insn (target
);
6410 op1
= gen_label_rtx ();
6411 jumpifnot (exp
, op1
);
6414 emit_0_to_1_insn (target
);
6417 return ignore
? const0_rtx
: target
;
6419 case TRUTH_NOT_EXPR
:
6420 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6421 /* The parser is careful to generate TRUTH_NOT_EXPR
6422 only with operands that are always zero or one. */
6423 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
6424 target
, 1, OPTAB_LIB_WIDEN
);
6430 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6432 return expand_expr (TREE_OPERAND (exp
, 1),
6433 (ignore
? const0_rtx
: target
),
6438 rtx flag
= NULL_RTX
;
6439 tree left_cleanups
= NULL_TREE
;
6440 tree right_cleanups
= NULL_TREE
;
6442 /* Used to save a pointer to the place to put the setting of
6443 the flag that indicates if this side of the conditional was
6444 taken. We backpatch the code, if we find out later that we
6445 have any conditional cleanups that need to be performed. */
6446 rtx dest_right_flag
= NULL_RTX
;
6447 rtx dest_left_flag
= NULL_RTX
;
6449 /* Note that COND_EXPRs whose type is a structure or union
6450 are required to be constructed to contain assignments of
6451 a temporary variable, so that we can evaluate them here
6452 for side effect only. If type is void, we must do likewise. */
6454 /* If an arm of the branch requires a cleanup,
6455 only that cleanup is performed. */
6458 tree binary_op
= 0, unary_op
= 0;
6459 tree old_cleanups
= cleanups_this_call
;
6461 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6462 convert it to our mode, if necessary. */
6463 if (integer_onep (TREE_OPERAND (exp
, 1))
6464 && integer_zerop (TREE_OPERAND (exp
, 2))
6465 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6469 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6474 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
6475 if (GET_MODE (op0
) == mode
)
6479 target
= gen_reg_rtx (mode
);
6480 convert_move (target
, op0
, unsignedp
);
6484 /* Check for X ? A + B : A. If we have this, we can copy
6485 A to the output and conditionally add B. Similarly for unary
6486 operations. Don't do this if X has side-effects because
6487 those side effects might affect A or B and the "?" operation is
6488 a sequence point in ANSI. (We test for side effects later.) */
6490 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
6491 && operand_equal_p (TREE_OPERAND (exp
, 2),
6492 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6493 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
6494 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
6495 && operand_equal_p (TREE_OPERAND (exp
, 1),
6496 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6497 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
6498 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
6499 && operand_equal_p (TREE_OPERAND (exp
, 2),
6500 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6501 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
6502 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
6503 && operand_equal_p (TREE_OPERAND (exp
, 1),
6504 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6505 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
6507 /* If we are not to produce a result, we have no target. Otherwise,
6508 if a target was specified use it; it will not be used as an
6509 intermediate target unless it is safe. If no target, use a
6514 else if (original_target
6515 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0))
6516 || (singleton
&& GET_CODE (original_target
) == REG
6517 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
6518 && original_target
== var_rtx (singleton
)))
6519 && GET_MODE (original_target
) == mode
6520 && ! (GET_CODE (original_target
) == MEM
6521 && MEM_VOLATILE_P (original_target
)))
6522 temp
= original_target
;
6523 else if (TREE_ADDRESSABLE (type
))
6526 temp
= assign_temp (type
, 0, 0, 1);
6528 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6529 operation, do this as A + (X != 0). Similarly for other simple
6530 binary operators. */
6531 if (temp
&& singleton
&& binary_op
6532 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6533 && (TREE_CODE (binary_op
) == PLUS_EXPR
6534 || TREE_CODE (binary_op
) == MINUS_EXPR
6535 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
6536 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
6537 && integer_onep (TREE_OPERAND (binary_op
, 1))
6538 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6541 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
6542 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
6543 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
6546 /* If we had X ? A : A + 1, do this as A + (X == 0).
6548 We have to invert the truth value here and then put it
6549 back later if do_store_flag fails. We cannot simply copy
6550 TREE_OPERAND (exp, 0) to another variable and modify that
6551 because invert_truthvalue can modify the tree pointed to
6553 if (singleton
== TREE_OPERAND (exp
, 1))
6554 TREE_OPERAND (exp
, 0)
6555 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6557 result
= do_store_flag (TREE_OPERAND (exp
, 0),
6558 (safe_from_p (temp
, singleton
)
6560 mode
, BRANCH_COST
<= 1);
6564 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
6565 return expand_binop (mode
, boptab
, op1
, result
, temp
,
6566 unsignedp
, OPTAB_LIB_WIDEN
);
6568 else if (singleton
== TREE_OPERAND (exp
, 1))
6569 TREE_OPERAND (exp
, 0)
6570 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6573 do_pending_stack_adjust ();
6575 op0
= gen_label_rtx ();
6577 flag
= gen_reg_rtx (word_mode
);
6578 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
6582 /* If the target conflicts with the other operand of the
6583 binary op, we can't use it. Also, we can't use the target
6584 if it is a hard register, because evaluating the condition
6585 might clobber it. */
6587 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1)))
6588 || (GET_CODE (temp
) == REG
6589 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
6590 temp
= gen_reg_rtx (mode
);
6591 store_expr (singleton
, temp
, 0);
6594 expand_expr (singleton
,
6595 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6596 dest_left_flag
= get_last_insn ();
6597 if (singleton
== TREE_OPERAND (exp
, 1))
6598 jumpif (TREE_OPERAND (exp
, 0), op0
);
6600 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6602 /* Allows cleanups up to here. */
6603 old_cleanups
= cleanups_this_call
;
6604 if (binary_op
&& temp
== 0)
6605 /* Just touch the other operand. */
6606 expand_expr (TREE_OPERAND (binary_op
, 1),
6607 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6609 store_expr (build (TREE_CODE (binary_op
), type
,
6610 make_tree (type
, temp
),
6611 TREE_OPERAND (binary_op
, 1)),
6614 store_expr (build1 (TREE_CODE (unary_op
), type
,
6615 make_tree (type
, temp
)),
6618 dest_right_flag
= get_last_insn ();
6621 /* This is now done in jump.c and is better done there because it
6622 produces shorter register lifetimes. */
6624 /* Check for both possibilities either constants or variables
6625 in registers (but not the same as the target!). If so, can
6626 save branches by assigning one, branching, and assigning the
6628 else if (temp
&& GET_MODE (temp
) != BLKmode
6629 && (TREE_CONSTANT (TREE_OPERAND (exp
, 1))
6630 || ((TREE_CODE (TREE_OPERAND (exp
, 1)) == PARM_DECL
6631 || TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
)
6632 && DECL_RTL (TREE_OPERAND (exp
, 1))
6633 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 1))) == REG
6634 && DECL_RTL (TREE_OPERAND (exp
, 1)) != temp
))
6635 && (TREE_CONSTANT (TREE_OPERAND (exp
, 2))
6636 || ((TREE_CODE (TREE_OPERAND (exp
, 2)) == PARM_DECL
6637 || TREE_CODE (TREE_OPERAND (exp
, 2)) == VAR_DECL
)
6638 && DECL_RTL (TREE_OPERAND (exp
, 2))
6639 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 2))) == REG
6640 && DECL_RTL (TREE_OPERAND (exp
, 2)) != temp
)))
6642 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6643 temp
= gen_reg_rtx (mode
);
6644 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6645 dest_left_flag
= get_last_insn ();
6646 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6648 /* Allows cleanups up to here. */
6649 old_cleanups
= cleanups_this_call
;
6650 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6652 dest_right_flag
= get_last_insn ();
6655 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6656 comparison operator. If we have one of these cases, set the
6657 output to A, branch on A (cse will merge these two references),
6658 then set the output to FOO. */
6660 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6661 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6662 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6663 TREE_OPERAND (exp
, 1), 0)
6664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6665 && safe_from_p (temp
, TREE_OPERAND (exp
, 2)))
6667 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6668 temp
= gen_reg_rtx (mode
);
6669 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6670 dest_left_flag
= get_last_insn ();
6671 jumpif (TREE_OPERAND (exp
, 0), op0
);
6673 /* Allows cleanups up to here. */
6674 old_cleanups
= cleanups_this_call
;
6675 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6677 dest_right_flag
= get_last_insn ();
6680 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6681 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6683 TREE_OPERAND (exp
, 2), 0)
6684 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6685 && safe_from_p (temp
, TREE_OPERAND (exp
, 1)))
6687 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6688 temp
= gen_reg_rtx (mode
);
6689 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6690 dest_left_flag
= get_last_insn ();
6691 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6693 /* Allows cleanups up to here. */
6694 old_cleanups
= cleanups_this_call
;
6695 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6697 dest_right_flag
= get_last_insn ();
6701 op1
= gen_label_rtx ();
6702 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6704 /* Allows cleanups up to here. */
6705 old_cleanups
= cleanups_this_call
;
6707 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6709 expand_expr (TREE_OPERAND (exp
, 1),
6710 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6711 dest_left_flag
= get_last_insn ();
6713 /* Handle conditional cleanups, if any. */
6714 left_cleanups
= defer_cleanups_to (old_cleanups
);
6717 emit_jump_insn (gen_jump (op1
));
6721 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6723 expand_expr (TREE_OPERAND (exp
, 2),
6724 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6725 dest_right_flag
= get_last_insn ();
6728 /* Handle conditional cleanups, if any. */
6729 right_cleanups
= defer_cleanups_to (old_cleanups
);
6735 /* Add back in, any conditional cleanups. */
6736 if (left_cleanups
|| right_cleanups
)
6742 /* Now that we know that a flag is needed, go back and add in the
6743 setting of the flag. */
6745 /* Do the left side flag. */
6746 last
= get_last_insn ();
6747 /* Flag left cleanups as needed. */
6748 emit_move_insn (flag
, const1_rtx
);
6749 /* ??? deprecated, use sequences instead. */
6750 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_left_flag
);
6752 /* Do the right side flag. */
6753 last
= get_last_insn ();
6754 /* Flag left cleanups as needed. */
6755 emit_move_insn (flag
, const0_rtx
);
6756 /* ??? deprecated, use sequences instead. */
6757 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_right_flag
);
6759 /* All cleanups must be on the function_obstack. */
6760 push_obstacks_nochange ();
6761 resume_temporary_allocation ();
6763 /* convert flag, which is an rtx, into a tree. */
6764 cond
= make_node (RTL_EXPR
);
6765 TREE_TYPE (cond
) = integer_type_node
;
6766 RTL_EXPR_RTL (cond
) = flag
;
6767 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
6768 cond
= save_expr (cond
);
6770 if (! left_cleanups
)
6771 left_cleanups
= integer_zero_node
;
6772 if (! right_cleanups
)
6773 right_cleanups
= integer_zero_node
;
6774 new_cleanups
= build (COND_EXPR
, void_type_node
,
6775 truthvalue_conversion (cond
),
6776 left_cleanups
, right_cleanups
);
6777 new_cleanups
= fold (new_cleanups
);
6781 /* Now add in the conditionalized cleanups. */
6783 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
6784 expand_eh_region_start ();
6791 /* Something needs to be initialized, but we didn't know
6792 where that thing was when building the tree. For example,
6793 it could be the return value of a function, or a parameter
6794 to a function which lays down in the stack, or a temporary
6795 variable which must be passed by reference.
6797 We guarantee that the expression will either be constructed
6798 or copied into our original target. */
6800 tree slot
= TREE_OPERAND (exp
, 0);
6801 tree cleanups
= NULL_TREE
;
6805 if (TREE_CODE (slot
) != VAR_DECL
)
6809 target
= original_target
;
6813 if (DECL_RTL (slot
) != 0)
6815 target
= DECL_RTL (slot
);
6816 /* If we have already expanded the slot, so don't do
6818 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6823 target
= assign_temp (type
, 2, 1, 1);
6824 /* All temp slots at this level must not conflict. */
6825 preserve_temp_slots (target
);
6826 DECL_RTL (slot
) = target
;
6828 /* Since SLOT is not known to the called function
6829 to belong to its stack frame, we must build an explicit
6830 cleanup. This case occurs when we must build up a reference
6831 to pass the reference as an argument. In this case,
6832 it is very likely that such a reference need not be
6835 if (TREE_OPERAND (exp
, 2) == 0)
6836 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
6837 cleanups
= TREE_OPERAND (exp
, 2);
6842 /* This case does occur, when expanding a parameter which
6843 needs to be constructed on the stack. The target
6844 is the actual stack address that we want to initialize.
6845 The function we call will perform the cleanup in this case. */
6847 /* If we have already assigned it space, use that space,
6848 not target that we were passed in, as our target
6849 parameter is only a hint. */
6850 if (DECL_RTL (slot
) != 0)
6852 target
= DECL_RTL (slot
);
6853 /* If we have already expanded the slot, so don't do
6855 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6859 DECL_RTL (slot
) = target
;
6862 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
6863 /* Mark it as expanded. */
6864 TREE_OPERAND (exp
, 1) = NULL_TREE
;
6866 store_expr (exp1
, target
, 0);
6870 cleanups_this_call
= tree_cons (NULL_TREE
,
6872 cleanups_this_call
);
6873 expand_eh_region_start ();
6881 tree lhs
= TREE_OPERAND (exp
, 0);
6882 tree rhs
= TREE_OPERAND (exp
, 1);
6883 tree noncopied_parts
= 0;
6884 tree lhs_type
= TREE_TYPE (lhs
);
6886 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
6887 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
6888 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
6889 TYPE_NONCOPIED_PARTS (lhs_type
));
6890 while (noncopied_parts
!= 0)
6892 expand_assignment (TREE_VALUE (noncopied_parts
),
6893 TREE_PURPOSE (noncopied_parts
), 0, 0);
6894 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
6901 /* If lhs is complex, expand calls in rhs before computing it.
6902 That's so we don't compute a pointer and save it over a call.
6903 If lhs is simple, compute it first so we can give it as a
6904 target if the rhs is just a call. This avoids an extra temp and copy
6905 and that prevents a partial-subsumption which makes bad code.
6906 Actually we could treat component_ref's of vars like vars. */
6908 tree lhs
= TREE_OPERAND (exp
, 0);
6909 tree rhs
= TREE_OPERAND (exp
, 1);
6910 tree noncopied_parts
= 0;
6911 tree lhs_type
= TREE_TYPE (lhs
);
6915 if (TREE_CODE (lhs
) != VAR_DECL
6916 && TREE_CODE (lhs
) != RESULT_DECL
6917 && TREE_CODE (lhs
) != PARM_DECL
)
6918 preexpand_calls (exp
);
6920 /* Check for |= or &= of a bitfield of size one into another bitfield
6921 of size 1. In this case, (unless we need the result of the
6922 assignment) we can do this more efficiently with a
6923 test followed by an assignment, if necessary.
6925 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6926 things change so we do, this code should be enhanced to
6929 && TREE_CODE (lhs
) == COMPONENT_REF
6930 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
6931 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
6932 && TREE_OPERAND (rhs
, 0) == lhs
6933 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
6934 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
6935 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
6937 rtx label
= gen_label_rtx ();
6939 do_jump (TREE_OPERAND (rhs
, 1),
6940 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
6941 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
6942 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
6943 (TREE_CODE (rhs
) == BIT_IOR_EXPR
6945 : integer_zero_node
)),
6947 do_pending_stack_adjust ();
6952 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
6953 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
6954 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
6955 TYPE_NONCOPIED_PARTS (lhs_type
));
6957 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
6958 while (noncopied_parts
!= 0)
6960 expand_assignment (TREE_PURPOSE (noncopied_parts
),
6961 TREE_VALUE (noncopied_parts
), 0, 0);
6962 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
6967 case PREINCREMENT_EXPR
:
6968 case PREDECREMENT_EXPR
:
6969 return expand_increment (exp
, 0, ignore
);
6971 case POSTINCREMENT_EXPR
:
6972 case POSTDECREMENT_EXPR
:
6973 /* Faster to treat as pre-increment if result is not used. */
6974 return expand_increment (exp
, ! ignore
, ignore
);
6977 /* If nonzero, TEMP will be set to the address of something that might
6978 be a MEM corresponding to a stack slot. */
6981 /* Are we taking the address of a nested function? */
6982 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
6983 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
6984 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0)))
6986 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
6987 op0
= force_operand (op0
, target
);
6989 /* If we are taking the address of something erroneous, just
6991 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
6995 /* We make sure to pass const0_rtx down if we came in with
6996 ignore set, to avoid doing the cleanups twice for something. */
6997 op0
= expand_expr (TREE_OPERAND (exp
, 0),
6998 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
6999 (modifier
== EXPAND_INITIALIZER
7000 ? modifier
: EXPAND_CONST_ADDRESS
));
7002 /* If we are going to ignore the result, OP0 will have been set
7003 to const0_rtx, so just return it. Don't get confused and
7004 think we are taking the address of the constant. */
7008 op0
= protect_from_queue (op0
, 0);
7010 /* We would like the object in memory. If it is a constant,
7011 we can have it be statically allocated into memory. For
7012 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7013 memory and store the value into it. */
7015 if (CONSTANT_P (op0
))
7016 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7018 else if (GET_CODE (op0
) == MEM
)
7020 mark_temp_addr_taken (op0
);
7021 temp
= XEXP (op0
, 0);
7024 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7025 || GET_CODE (op0
) == CONCAT
)
7027 /* If this object is in a register, it must be not
7029 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7030 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
7032 mark_temp_addr_taken (memloc
);
7033 emit_move_insn (memloc
, op0
);
7037 if (GET_CODE (op0
) != MEM
)
7040 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7042 temp
= XEXP (op0
, 0);
7043 #ifdef POINTERS_EXTEND_UNSIGNED
7044 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
7045 && mode
== ptr_mode
)
7046 temp
= convert_memory_address (ptr_mode
, temp
);
7051 op0
= force_operand (XEXP (op0
, 0), target
);
7054 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
7055 op0
= force_reg (Pmode
, op0
);
7057 if (GET_CODE (op0
) == REG
7058 && ! REG_USERVAR_P (op0
))
7059 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
7061 /* If we might have had a temp slot, add an equivalent address
7064 update_temp_slot_address (temp
, op0
);
7066 #ifdef POINTERS_EXTEND_UNSIGNED
7067 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
7068 && mode
== ptr_mode
)
7069 op0
= convert_memory_address (ptr_mode
, op0
);
7074 case ENTRY_VALUE_EXPR
:
7077 /* COMPLEX type for Extended Pascal & Fortran */
7080 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7083 /* Get the rtx code of the operands. */
7084 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7085 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
7088 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7092 /* Move the real (op0) and imaginary (op1) parts to their location. */
7093 emit_move_insn (gen_realpart (mode
, target
), op0
);
7094 emit_move_insn (gen_imagpart (mode
, target
), op1
);
7096 insns
= get_insns ();
7099 /* Complex construction should appear as a single unit. */
7100 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7101 each with a separate pseudo as destination.
7102 It's not correct for flow to treat them as a unit. */
7103 if (GET_CODE (target
) != CONCAT
)
7104 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
7112 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7113 return gen_realpart (mode
, op0
);
7116 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7117 return gen_imagpart (mode
, op0
);
7121 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7125 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7128 target
= gen_reg_rtx (mode
);
7132 /* Store the realpart and the negated imagpart to target. */
7133 emit_move_insn (gen_realpart (partmode
, target
),
7134 gen_realpart (partmode
, op0
));
7136 imag_t
= gen_imagpart (partmode
, target
);
7137 temp
= expand_unop (partmode
, neg_optab
,
7138 gen_imagpart (partmode
, op0
), imag_t
, 0);
7140 emit_move_insn (imag_t
, temp
);
7142 insns
= get_insns ();
7145 /* Conjugate should appear as a single unit
7146 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7147 each with a separate pseudo as destination.
7148 It's not correct for flow to treat them as a unit. */
7149 if (GET_CODE (target
) != CONCAT
)
7150 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
7158 op0
= CONST0_RTX (tmode
);
7164 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
7167 /* Here to do an ordinary binary operator, generating an instruction
7168 from the optab already placed in `this_optab'. */
7170 preexpand_calls (exp
);
7171 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
7173 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7174 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7176 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
7177 unsignedp
, OPTAB_LIB_WIDEN
);
7184 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7187 bc_expand_expr (exp
)
7190 enum tree_code code
;
7193 struct binary_operator
*binoptab
;
7194 struct unary_operator
*unoptab
;
7195 struct increment_operator
*incroptab
;
7196 struct bc_label
*lab
, *lab1
;
7197 enum bytecode_opcode opcode
;
7200 code
= TREE_CODE (exp
);
7206 if (DECL_RTL (exp
) == 0)
7208 error_with_decl (exp
, "prior parameter's size depends on `%s'");
7212 bc_load_parmaddr (DECL_RTL (exp
));
7213 bc_load_memory (TREE_TYPE (exp
), exp
);
7219 if (DECL_RTL (exp
) == 0)
7223 if (BYTECODE_LABEL (DECL_RTL (exp
)))
7224 bc_load_externaddr (DECL_RTL (exp
));
7226 bc_load_localaddr (DECL_RTL (exp
));
7228 if (TREE_PUBLIC (exp
))
7229 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
7230 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
7232 bc_load_localaddr (DECL_RTL (exp
));
7234 bc_load_memory (TREE_TYPE (exp
), exp
);
7239 #ifdef DEBUG_PRINT_CODE
7240 fprintf (stderr
, " [%x]\n", TREE_INT_CST_LOW (exp
));
7242 bc_emit_instruction (mode_to_const_map
[(int) (DECL_BIT_FIELD (exp
)
7244 : TYPE_MODE (TREE_TYPE (exp
)))],
7245 (HOST_WIDE_INT
) TREE_INT_CST_LOW (exp
));
7251 #ifdef DEBUG_PRINT_CODE
7252 fprintf (stderr
, " [%g]\n", (double) TREE_INT_CST_LOW (exp
));
7254 /* FIX THIS: find a better way to pass real_cst's. -bson */
7255 bc_emit_instruction (mode_to_const_map
[TYPE_MODE (TREE_TYPE (exp
))],
7256 (double) TREE_REAL_CST (exp
));
7265 /* We build a call description vector describing the type of
7266 the return value and of the arguments; this call vector,
7267 together with a pointer to a location for the return value
7268 and the base of the argument list, is passed to the low
7269 level machine dependent call subroutine, which is responsible
7270 for putting the arguments wherever real functions expect
7271 them, as well as getting the return value back. */
7273 tree calldesc
= 0, arg
;
7277 /* Push the evaluated args on the evaluation stack in reverse
7278 order. Also make an entry for each arg in the calldesc
7279 vector while we're at it. */
7281 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7283 for (arg
= TREE_OPERAND (exp
, 1); arg
; arg
= TREE_CHAIN (arg
))
7286 bc_expand_expr (TREE_VALUE (arg
));
7288 calldesc
= tree_cons ((tree
) 0,
7289 size_in_bytes (TREE_TYPE (TREE_VALUE (arg
))),
7291 calldesc
= tree_cons ((tree
) 0,
7292 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg
))),
7296 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7298 /* Allocate a location for the return value and push its
7299 address on the evaluation stack. Also make an entry
7300 at the front of the calldesc for the return value type. */
7302 type
= TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7303 retval
= bc_allocate_local (int_size_in_bytes (type
), TYPE_ALIGN (type
));
7304 bc_load_localaddr (retval
);
7306 calldesc
= tree_cons ((tree
) 0, size_in_bytes (type
), calldesc
);
7307 calldesc
= tree_cons ((tree
) 0, bc_runtime_type_code (type
), calldesc
);
7309 /* Prepend the argument count. */
7310 calldesc
= tree_cons ((tree
) 0,
7311 build_int_2 (nargs
, 0),
7314 /* Push the address of the call description vector on the stack. */
7315 calldesc
= build_nt (CONSTRUCTOR
, (tree
) 0, calldesc
);
7316 TREE_TYPE (calldesc
) = build_array_type (integer_type_node
,
7317 build_index_type (build_int_2 (nargs
* 2, 0)));
7318 r
= output_constant_def (calldesc
);
7319 bc_load_externaddr (r
);
7321 /* Push the address of the function to be called. */
7322 bc_expand_expr (TREE_OPERAND (exp
, 0));
7324 /* Call the function, popping its address and the calldesc vector
7325 address off the evaluation stack in the process. */
7326 bc_emit_instruction (call
);
7328 /* Pop the arguments off the stack. */
7329 bc_adjust_stack (nargs
);
7331 /* Load the return value onto the stack. */
7332 bc_load_localaddr (retval
);
7333 bc_load_memory (type
, TREE_OPERAND (exp
, 0));
7339 if (!SAVE_EXPR_RTL (exp
))
7341 /* First time around: copy to local variable */
7342 SAVE_EXPR_RTL (exp
) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp
)),
7343 TYPE_ALIGN (TREE_TYPE(exp
)));
7344 bc_expand_expr (TREE_OPERAND (exp
, 0));
7345 bc_emit_instruction (duplicate
);
7347 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7348 bc_store_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7352 /* Consecutive reference: use saved copy */
7353 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7354 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7359 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7360 how are they handled instead? */
7363 TREE_USED (exp
) = 1;
7364 bc_expand_expr (STMT_BODY (exp
));
7371 bc_expand_expr (TREE_OPERAND (exp
, 0));
7372 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)), TREE_TYPE (exp
));
7377 expand_assignment (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1), 0, 0);
7382 bc_expand_address (TREE_OPERAND (exp
, 0));
7387 bc_expand_expr (TREE_OPERAND (exp
, 0));
7388 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7393 bc_expand_expr (bc_canonicalize_array_ref (exp
));
7398 bc_expand_component_address (exp
);
7400 /* If we have a bitfield, generate a proper load */
7401 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp
, 1)), TREE_OPERAND (exp
, 1));
7406 bc_expand_expr (TREE_OPERAND (exp
, 0));
7407 bc_emit_instruction (drop
);
7408 bc_expand_expr (TREE_OPERAND (exp
, 1));
7413 bc_expand_expr (TREE_OPERAND (exp
, 0));
7414 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7415 lab
= bc_get_bytecode_label ();
7416 bc_emit_bytecode (xjumpifnot
);
7417 bc_emit_bytecode_labelref (lab
);
7419 #ifdef DEBUG_PRINT_CODE
7420 fputc ('\n', stderr
);
7422 bc_expand_expr (TREE_OPERAND (exp
, 1));
7423 lab1
= bc_get_bytecode_label ();
7424 bc_emit_bytecode (jump
);
7425 bc_emit_bytecode_labelref (lab1
);
7427 #ifdef DEBUG_PRINT_CODE
7428 fputc ('\n', stderr
);
7431 bc_emit_bytecode_labeldef (lab
);
7432 bc_expand_expr (TREE_OPERAND (exp
, 2));
7433 bc_emit_bytecode_labeldef (lab1
);
7436 case TRUTH_ANDIF_EXPR
:
7438 opcode
= xjumpifnot
;
7441 case TRUTH_ORIF_EXPR
:
7448 binoptab
= optab_plus_expr
;
7453 binoptab
= optab_minus_expr
;
7458 binoptab
= optab_mult_expr
;
7461 case TRUNC_DIV_EXPR
:
7462 case FLOOR_DIV_EXPR
:
7464 case ROUND_DIV_EXPR
:
7465 case EXACT_DIV_EXPR
:
7467 binoptab
= optab_trunc_div_expr
;
7470 case TRUNC_MOD_EXPR
:
7471 case FLOOR_MOD_EXPR
:
7473 case ROUND_MOD_EXPR
:
7475 binoptab
= optab_trunc_mod_expr
;
7478 case FIX_ROUND_EXPR
:
7479 case FIX_FLOOR_EXPR
:
7481 abort (); /* Not used for C. */
7483 case FIX_TRUNC_EXPR
:
7490 abort (); /* FIXME */
7494 binoptab
= optab_rdiv_expr
;
7499 binoptab
= optab_bit_and_expr
;
7504 binoptab
= optab_bit_ior_expr
;
7509 binoptab
= optab_bit_xor_expr
;
7514 binoptab
= optab_lshift_expr
;
7519 binoptab
= optab_rshift_expr
;
7522 case TRUTH_AND_EXPR
:
7524 binoptab
= optab_truth_and_expr
;
7529 binoptab
= optab_truth_or_expr
;
7534 binoptab
= optab_lt_expr
;
7539 binoptab
= optab_le_expr
;
7544 binoptab
= optab_ge_expr
;
7549 binoptab
= optab_gt_expr
;
7554 binoptab
= optab_eq_expr
;
7559 binoptab
= optab_ne_expr
;
7564 unoptab
= optab_negate_expr
;
7569 unoptab
= optab_bit_not_expr
;
7572 case TRUTH_NOT_EXPR
:
7574 unoptab
= optab_truth_not_expr
;
7577 case PREDECREMENT_EXPR
:
7579 incroptab
= optab_predecrement_expr
;
7582 case PREINCREMENT_EXPR
:
7584 incroptab
= optab_preincrement_expr
;
7587 case POSTDECREMENT_EXPR
:
7589 incroptab
= optab_postdecrement_expr
;
7592 case POSTINCREMENT_EXPR
:
7594 incroptab
= optab_postincrement_expr
;
7599 bc_expand_constructor (exp
);
7609 tree vars
= TREE_OPERAND (exp
, 0);
7610 int vars_need_expansion
= 0;
7612 /* Need to open a binding contour here because
7613 if there are any cleanups they most be contained here. */
7614 expand_start_bindings (0);
7616 /* Mark the corresponding BLOCK for output. */
7617 if (TREE_OPERAND (exp
, 2) != 0)
7618 TREE_USED (TREE_OPERAND (exp
, 2)) = 1;
7620 /* If VARS have not yet been expanded, expand them now. */
7623 if (DECL_RTL (vars
) == 0)
7625 vars_need_expansion
= 1;
7628 expand_decl_init (vars
);
7629 vars
= TREE_CHAIN (vars
);
7632 bc_expand_expr (TREE_OPERAND (exp
, 1));
7634 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
7644 bc_expand_binary_operation (binoptab
, TREE_TYPE (exp
),
7645 TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1));
7651 bc_expand_unary_operation (unoptab
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7657 bc_expand_expr (TREE_OPERAND (exp
, 0));
7658 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7659 lab
= bc_get_bytecode_label ();
7661 bc_emit_instruction (duplicate
);
7662 bc_emit_bytecode (opcode
);
7663 bc_emit_bytecode_labelref (lab
);
7665 #ifdef DEBUG_PRINT_CODE
7666 fputc ('\n', stderr
);
7669 bc_emit_instruction (drop
);
7671 bc_expand_expr (TREE_OPERAND (exp
, 1));
7672 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7673 bc_emit_bytecode_labeldef (lab
);
7679 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7681 /* Push the quantum. */
7682 bc_expand_expr (TREE_OPERAND (exp
, 1));
7684 /* Convert it to the lvalue's type. */
7685 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)), type
);
7687 /* Push the address of the lvalue */
7688 bc_expand_expr (build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
), TREE_OPERAND (exp
, 0)));
7690 /* Perform actual increment */
7691 bc_expand_increment (incroptab
, type
);
7695 /* Return the alignment in bits of EXP, a pointer valued expression.
7696 But don't return more than MAX_ALIGN no matter what.
7697 The alignment returned is, by default, the alignment of the thing that
7698 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7700 Otherwise, look at the expression to see if we can do better, i.e., if the
7701 expression is actually pointing at an object whose alignment is tighter. */
7704 get_pointer_alignment (exp
, max_align
)
7708 unsigned align
, inner
;
7710 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
7713 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7714 align
= MIN (align
, max_align
);
7718 switch (TREE_CODE (exp
))
7722 case NON_LVALUE_EXPR
:
7723 exp
= TREE_OPERAND (exp
, 0);
7724 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
7726 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7727 align
= MIN (inner
, max_align
);
7731 /* If sum of pointer + int, restrict our maximum alignment to that
7732 imposed by the integer. If not, we can't do any better than
7734 if (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
)
7737 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
)
7742 exp
= TREE_OPERAND (exp
, 0);
7746 /* See what we are pointing at and look at its alignment. */
7747 exp
= TREE_OPERAND (exp
, 0);
7748 if (TREE_CODE (exp
) == FUNCTION_DECL
)
7749 align
= FUNCTION_BOUNDARY
;
7750 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
7751 align
= DECL_ALIGN (exp
);
7752 #ifdef CONSTANT_ALIGNMENT
7753 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c')
7754 align
= CONSTANT_ALIGNMENT (exp
, align
);
7756 return MIN (align
, max_align
);
7764 /* Return the tree node and offset if a given argument corresponds to
7765 a string constant. */
7768 string_constant (arg
, ptr_offset
)
7774 if (TREE_CODE (arg
) == ADDR_EXPR
7775 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
7777 *ptr_offset
= integer_zero_node
;
7778 return TREE_OPERAND (arg
, 0);
7780 else if (TREE_CODE (arg
) == PLUS_EXPR
)
7782 tree arg0
= TREE_OPERAND (arg
, 0);
7783 tree arg1
= TREE_OPERAND (arg
, 1);
7788 if (TREE_CODE (arg0
) == ADDR_EXPR
7789 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
7792 return TREE_OPERAND (arg0
, 0);
7794 else if (TREE_CODE (arg1
) == ADDR_EXPR
7795 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
7798 return TREE_OPERAND (arg1
, 0);
7805 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7806 way, because it could contain a zero byte in the middle.
7807 TREE_STRING_LENGTH is the size of the character array, not the string.
7809 Unfortunately, string_constant can't access the values of const char
7810 arrays with initializers, so neither can we do so here. */
7820 src
= string_constant (src
, &offset_node
);
7823 max
= TREE_STRING_LENGTH (src
);
7824 ptr
= TREE_STRING_POINTER (src
);
7825 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
7827 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7828 compute the offset to the following null if we don't know where to
7829 start searching for it. */
7831 for (i
= 0; i
< max
; i
++)
7834 /* We don't know the starting offset, but we do know that the string
7835 has no internal zero bytes. We can assume that the offset falls
7836 within the bounds of the string; otherwise, the programmer deserves
7837 what he gets. Subtract the offset from the length of the string,
7839 /* This would perhaps not be valid if we were dealing with named
7840 arrays in addition to literal string constants. */
7841 return size_binop (MINUS_EXPR
, size_int (max
), offset_node
);
7844 /* We have a known offset into the string. Start searching there for
7845 a null character. */
7846 if (offset_node
== 0)
7850 /* Did we get a long long offset? If so, punt. */
7851 if (TREE_INT_CST_HIGH (offset_node
) != 0)
7853 offset
= TREE_INT_CST_LOW (offset_node
);
7855 /* If the offset is known to be out of bounds, warn, and call strlen at
7857 if (offset
< 0 || offset
> max
)
7859 warning ("offset outside bounds of constant string");
7862 /* Use strlen to search for the first zero byte. Since any strings
7863 constructed with build_string will have nulls appended, we win even
7864 if we get handed something like (char[4])"abcd".
7866 Since OFFSET is our starting index into the string, no further
7867 calculation is needed. */
7868 return size_int (strlen (ptr
+ offset
));
7872 expand_builtin_return_addr (fndecl_code
, count
, tem
)
7873 enum built_in_function fndecl_code
;
7879 /* Some machines need special handling before we can access
7880 arbitrary frames. For example, on the sparc, we must first flush
7881 all register windows to the stack. */
7882 #ifdef SETUP_FRAME_ADDRESSES
7883 SETUP_FRAME_ADDRESSES ();
7886 /* On the sparc, the return address is not in the frame, it is in a
7887 register. There is no way to access it off of the current frame
7888 pointer, but it can be accessed off the previous frame pointer by
7889 reading the value from the register window save area. */
7890 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7891 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
7895 /* Scan back COUNT frames to the specified frame. */
7896 for (i
= 0; i
< count
; i
++)
7898 /* Assume the dynamic chain pointer is in the word that the
7899 frame address points to, unless otherwise specified. */
7900 #ifdef DYNAMIC_CHAIN_ADDRESS
7901 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
7903 tem
= memory_address (Pmode
, tem
);
7904 tem
= copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
7907 /* For __builtin_frame_address, return what we've got. */
7908 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
7911 /* For __builtin_return_address, Get the return address from that
7913 #ifdef RETURN_ADDR_RTX
7914 tem
= RETURN_ADDR_RTX (count
, tem
);
7916 tem
= memory_address (Pmode
,
7917 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
7918 tem
= gen_rtx (MEM
, Pmode
, tem
);
7923 /* Expand an expression EXP that calls a built-in function,
7924 with result going to TARGET if that's convenient
7925 (and in mode MODE if that's convenient).
7926 SUBTARGET may be used as the target for computing one of EXP's operands.
7927 IGNORE is nonzero if the value is to be ignored. */
7929 #define CALLED_AS_BUILT_IN(NODE) \
7930 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7933 expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
7937 enum machine_mode mode
;
7940 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7941 tree arglist
= TREE_OPERAND (exp
, 1);
7944 enum machine_mode value_mode
= TYPE_MODE (TREE_TYPE (exp
));
7945 optab builtin_optab
;
7947 switch (DECL_FUNCTION_CODE (fndecl
))
7952 /* build_function_call changes these into ABS_EXPR. */
7957 /* Treat these like sqrt, but only if the user asks for them. */
7958 if (! flag_fast_math
)
7960 case BUILT_IN_FSQRT
:
7961 /* If not optimizing, call the library function. */
7966 /* Arg could be wrong type if user redeclared this fcn wrong. */
7967 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
)
7970 /* Stabilize and compute the argument. */
7971 if (TREE_CODE (TREE_VALUE (arglist
)) != VAR_DECL
7972 && TREE_CODE (TREE_VALUE (arglist
)) != PARM_DECL
)
7974 exp
= copy_node (exp
);
7975 arglist
= copy_node (arglist
);
7976 TREE_OPERAND (exp
, 1) = arglist
;
7977 TREE_VALUE (arglist
) = save_expr (TREE_VALUE (arglist
));
7979 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
7981 /* Make a suitable register to place result in. */
7982 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7987 switch (DECL_FUNCTION_CODE (fndecl
))
7990 builtin_optab
= sin_optab
; break;
7992 builtin_optab
= cos_optab
; break;
7993 case BUILT_IN_FSQRT
:
7994 builtin_optab
= sqrt_optab
; break;
7999 /* Compute into TARGET.
8000 Set TARGET to wherever the result comes back. */
8001 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8002 builtin_optab
, op0
, target
, 0);
8004 /* If we were unable to expand via the builtin, stop the
8005 sequence (without outputting the insns) and break, causing
8006 a call the the library function. */
8013 /* Check the results by default. But if flag_fast_math is turned on,
8014 then assume sqrt will always be called with valid arguments. */
8016 if (! flag_fast_math
)
8018 /* Don't define the builtin FP instructions
8019 if your machine is not IEEE. */
8020 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
8023 lab1
= gen_label_rtx ();
8025 /* Test the result; if it is NaN, set errno=EDOM because
8026 the argument was not in the domain. */
8027 emit_cmp_insn (target
, target
, EQ
, 0, GET_MODE (target
), 0, 0);
8028 emit_jump_insn (gen_beq (lab1
));
8032 #ifdef GEN_ERRNO_RTX
8033 rtx errno_rtx
= GEN_ERRNO_RTX
;
8036 = gen_rtx (MEM
, word_mode
, gen_rtx (SYMBOL_REF
, Pmode
, "errno"));
8039 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
8042 /* We can't set errno=EDOM directly; let the library call do it.
8043 Pop the arguments right away in case the call gets deleted. */
8045 expand_call (exp
, target
, 0);
8052 /* Output the entire sequence. */
8053 insns
= get_insns ();
8059 /* __builtin_apply_args returns block of memory allocated on
8060 the stack into which is stored the arg pointer, structure
8061 value address, static chain, and all the registers that might
8062 possibly be used in performing a function call. The code is
8063 moved to the start of the function so the incoming values are
8065 case BUILT_IN_APPLY_ARGS
:
8066 /* Don't do __builtin_apply_args more than once in a function.
8067 Save the result of the first call and reuse it. */
8068 if (apply_args_value
!= 0)
8069 return apply_args_value
;
8071 /* When this function is called, it means that registers must be
8072 saved on entry to this function. So we migrate the
8073 call to the first insn of this function. */
8078 temp
= expand_builtin_apply_args ();
8082 apply_args_value
= temp
;
8084 /* Put the sequence after the NOTE that starts the function.
8085 If this is inside a SEQUENCE, make the outer-level insn
8086 chain current, so the code is placed at the start of the
8088 push_topmost_sequence ();
8089 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8090 pop_topmost_sequence ();
8094 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8095 FUNCTION with a copy of the parameters described by
8096 ARGUMENTS, and ARGSIZE. It returns a block of memory
8097 allocated on the stack into which is stored all the registers
8098 that might possibly be used for returning the result of a
8099 function. ARGUMENTS is the value returned by
8100 __builtin_apply_args. ARGSIZE is the number of bytes of
8101 arguments that must be copied. ??? How should this value be
8102 computed? We'll also need a safe worst case value for varargs
8104 case BUILT_IN_APPLY
:
8106 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8107 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8108 || TREE_CHAIN (arglist
) == 0
8109 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8110 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8111 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8119 for (t
= arglist
, i
= 0; t
; t
= TREE_CHAIN (t
), i
++)
8120 ops
[i
] = expand_expr (TREE_VALUE (t
), NULL_RTX
, VOIDmode
, 0);
8122 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
8125 /* __builtin_return (RESULT) causes the function to return the
8126 value described by RESULT. RESULT is address of the block of
8127 memory returned by __builtin_apply. */
8128 case BUILT_IN_RETURN
:
8130 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8131 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
)
8132 expand_builtin_return (expand_expr (TREE_VALUE (arglist
),
8133 NULL_RTX
, VOIDmode
, 0));
8136 case BUILT_IN_SAVEREGS
:
8137 /* Don't do __builtin_saveregs more than once in a function.
8138 Save the result of the first call and reuse it. */
8139 if (saveregs_value
!= 0)
8140 return saveregs_value
;
8142 /* When this function is called, it means that registers must be
8143 saved on entry to this function. So we migrate the
8144 call to the first insn of this function. */
8148 /* Now really call the function. `expand_call' does not call
8149 expand_builtin, so there is no danger of infinite recursion here. */
8152 #ifdef EXPAND_BUILTIN_SAVEREGS
8153 /* Do whatever the machine needs done in this case. */
8154 temp
= EXPAND_BUILTIN_SAVEREGS (arglist
);
8156 /* The register where the function returns its value
8157 is likely to have something else in it, such as an argument.
8158 So preserve that register around the call. */
8160 if (value_mode
!= VOIDmode
)
8162 rtx valreg
= hard_libcall_value (value_mode
);
8163 rtx saved_valreg
= gen_reg_rtx (value_mode
);
8165 emit_move_insn (saved_valreg
, valreg
);
8166 temp
= expand_call (exp
, target
, ignore
);
8167 emit_move_insn (valreg
, saved_valreg
);
8170 /* Generate the call, putting the value in a pseudo. */
8171 temp
= expand_call (exp
, target
, ignore
);
8177 saveregs_value
= temp
;
8179 /* Put the sequence after the NOTE that starts the function.
8180 If this is inside a SEQUENCE, make the outer-level insn
8181 chain current, so the code is placed at the start of the
8183 push_topmost_sequence ();
8184 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8185 pop_topmost_sequence ();
8189 /* __builtin_args_info (N) returns word N of the arg space info
8190 for the current function. The number and meanings of words
8191 is controlled by the definition of CUMULATIVE_ARGS. */
8192 case BUILT_IN_ARGS_INFO
:
8194 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
8196 int *word_ptr
= (int *) ¤t_function_args_info
;
8197 tree type
, elts
, result
;
8199 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
8200 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8201 __FILE__
, __LINE__
);
8205 tree arg
= TREE_VALUE (arglist
);
8206 if (TREE_CODE (arg
) != INTEGER_CST
)
8207 error ("argument of `__builtin_args_info' must be constant");
8210 int wordnum
= TREE_INT_CST_LOW (arg
);
8212 if (wordnum
< 0 || wordnum
>= nwords
|| TREE_INT_CST_HIGH (arg
))
8213 error ("argument of `__builtin_args_info' out of range");
8215 return GEN_INT (word_ptr
[wordnum
]);
8219 error ("missing argument in `__builtin_args_info'");
8224 for (i
= 0; i
< nwords
; i
++)
8225 elts
= tree_cons (NULL_TREE
, build_int_2 (word_ptr
[i
], 0));
8227 type
= build_array_type (integer_type_node
,
8228 build_index_type (build_int_2 (nwords
, 0)));
8229 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, nreverse (elts
));
8230 TREE_CONSTANT (result
) = 1;
8231 TREE_STATIC (result
) = 1;
8232 result
= build (INDIRECT_REF
, build_pointer_type (type
), result
);
8233 TREE_CONSTANT (result
) = 1;
8234 return expand_expr (result
, NULL_RTX
, VOIDmode
, 0);
8238 /* Return the address of the first anonymous stack arg. */
8239 case BUILT_IN_NEXT_ARG
:
8241 tree fntype
= TREE_TYPE (current_function_decl
);
8243 if ((TYPE_ARG_TYPES (fntype
) == 0
8244 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
8246 && ! current_function_varargs
)
8248 error ("`va_start' used in function with fixed args");
8254 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
8255 tree arg
= TREE_VALUE (arglist
);
8257 /* Strip off all nops for the sake of the comparison. This
8258 is not quite the same as STRIP_NOPS. It does more.
8259 We must also strip off INDIRECT_EXPR for C++ reference
8261 while (TREE_CODE (arg
) == NOP_EXPR
8262 || TREE_CODE (arg
) == CONVERT_EXPR
8263 || TREE_CODE (arg
) == NON_LVALUE_EXPR
8264 || TREE_CODE (arg
) == INDIRECT_REF
)
8265 arg
= TREE_OPERAND (arg
, 0);
8266 if (arg
!= last_parm
)
8267 warning ("second parameter of `va_start' not last named argument");
8269 else if (! current_function_varargs
)
8270 /* Evidently an out of date version of <stdarg.h>; can't validate
8271 va_start's second argument, but can still work as intended. */
8272 warning ("`__builtin_next_arg' called without an argument");
8275 return expand_binop (Pmode
, add_optab
,
8276 current_function_internal_arg_pointer
,
8277 current_function_arg_offset_rtx
,
8278 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
8280 case BUILT_IN_CLASSIFY_TYPE
:
8283 tree type
= TREE_TYPE (TREE_VALUE (arglist
));
8284 enum tree_code code
= TREE_CODE (type
);
8285 if (code
== VOID_TYPE
)
8286 return GEN_INT (void_type_class
);
8287 if (code
== INTEGER_TYPE
)
8288 return GEN_INT (integer_type_class
);
8289 if (code
== CHAR_TYPE
)
8290 return GEN_INT (char_type_class
);
8291 if (code
== ENUMERAL_TYPE
)
8292 return GEN_INT (enumeral_type_class
);
8293 if (code
== BOOLEAN_TYPE
)
8294 return GEN_INT (boolean_type_class
);
8295 if (code
== POINTER_TYPE
)
8296 return GEN_INT (pointer_type_class
);
8297 if (code
== REFERENCE_TYPE
)
8298 return GEN_INT (reference_type_class
);
8299 if (code
== OFFSET_TYPE
)
8300 return GEN_INT (offset_type_class
);
8301 if (code
== REAL_TYPE
)
8302 return GEN_INT (real_type_class
);
8303 if (code
== COMPLEX_TYPE
)
8304 return GEN_INT (complex_type_class
);
8305 if (code
== FUNCTION_TYPE
)
8306 return GEN_INT (function_type_class
);
8307 if (code
== METHOD_TYPE
)
8308 return GEN_INT (method_type_class
);
8309 if (code
== RECORD_TYPE
)
8310 return GEN_INT (record_type_class
);
8311 if (code
== UNION_TYPE
|| code
== QUAL_UNION_TYPE
)
8312 return GEN_INT (union_type_class
);
8313 if (code
== ARRAY_TYPE
)
8315 if (TYPE_STRING_FLAG (type
))
8316 return GEN_INT (string_type_class
);
8318 return GEN_INT (array_type_class
);
8320 if (code
== SET_TYPE
)
8321 return GEN_INT (set_type_class
);
8322 if (code
== FILE_TYPE
)
8323 return GEN_INT (file_type_class
);
8324 if (code
== LANG_TYPE
)
8325 return GEN_INT (lang_type_class
);
8327 return GEN_INT (no_type_class
);
8329 case BUILT_IN_CONSTANT_P
:
8334 tree arg
= TREE_VALUE (arglist
);
8337 return (TREE_CODE_CLASS (TREE_CODE (arg
)) == 'c'
8338 || (TREE_CODE (arg
) == ADDR_EXPR
8339 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8340 ? const1_rtx
: const0_rtx
);
8343 case BUILT_IN_FRAME_ADDRESS
:
8344 /* The argument must be a nonnegative integer constant.
8345 It counts the number of frames to scan up the stack.
8346 The value is the address of that frame. */
8347 case BUILT_IN_RETURN_ADDRESS
:
8348 /* The argument must be a nonnegative integer constant.
8349 It counts the number of frames to scan up the stack.
8350 The value is the return address saved in that frame. */
8352 /* Warning about missing arg was already issued. */
8354 else if (TREE_CODE (TREE_VALUE (arglist
)) != INTEGER_CST
)
8356 error ("invalid arg to `__builtin_return_address'");
8359 else if (tree_int_cst_sgn (TREE_VALUE (arglist
)) < 0)
8361 error ("invalid arg to `__builtin_return_address'");
8366 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
8367 TREE_INT_CST_LOW (TREE_VALUE (arglist
)),
8368 hard_frame_pointer_rtx
);
8370 /* For __builtin_frame_address, return what we've got. */
8371 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
8374 if (GET_CODE (tem
) != REG
)
8375 tem
= copy_to_reg (tem
);
8379 case BUILT_IN_ALLOCA
:
8381 /* Arg could be non-integer if user redeclared this fcn wrong. */
8382 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8385 /* Compute the argument. */
8386 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
8388 /* Allocate the desired space. */
8389 return allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
8392 /* If not optimizing, call the library function. */
8393 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8397 /* Arg could be non-integer if user redeclared this fcn wrong. */
8398 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8401 /* Compute the argument. */
8402 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
8403 /* Compute ffs, into TARGET if possible.
8404 Set TARGET to wherever the result comes back. */
8405 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8406 ffs_optab
, op0
, target
, 1);
8411 case BUILT_IN_STRLEN
:
8412 /* If not optimizing, call the library function. */
8413 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8417 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8418 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8422 tree src
= TREE_VALUE (arglist
);
8423 tree len
= c_strlen (src
);
8426 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8428 rtx result
, src_rtx
, char_rtx
;
8429 enum machine_mode insn_mode
= value_mode
, char_mode
;
8430 enum insn_code icode
;
8432 /* If the length is known, just return it. */
8434 return expand_expr (len
, target
, mode
, 0);
8436 /* If SRC is not a pointer type, don't do this operation inline. */
8440 /* Call a function if we can't compute strlen in the right mode. */
8442 while (insn_mode
!= VOIDmode
)
8444 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
8445 if (icode
!= CODE_FOR_nothing
)
8448 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
8450 if (insn_mode
== VOIDmode
)
8453 /* Make a place to write the result of the instruction. */
8456 && GET_CODE (result
) == REG
8457 && GET_MODE (result
) == insn_mode
8458 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8459 result
= gen_reg_rtx (insn_mode
);
8461 /* Make sure the operands are acceptable to the predicates. */
8463 if (! (*insn_operand_predicate
[(int)icode
][0]) (result
, insn_mode
))
8464 result
= gen_reg_rtx (insn_mode
);
8466 src_rtx
= memory_address (BLKmode
,
8467 expand_expr (src
, NULL_RTX
, ptr_mode
,
8469 if (! (*insn_operand_predicate
[(int)icode
][1]) (src_rtx
, Pmode
))
8470 src_rtx
= copy_to_mode_reg (Pmode
, src_rtx
);
8472 char_rtx
= const0_rtx
;
8473 char_mode
= insn_operand_mode
[(int)icode
][2];
8474 if (! (*insn_operand_predicate
[(int)icode
][2]) (char_rtx
, char_mode
))
8475 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
8477 emit_insn (GEN_FCN (icode
) (result
,
8478 gen_rtx (MEM
, BLKmode
, src_rtx
),
8479 char_rtx
, GEN_INT (align
)));
8481 /* Return the value in the proper mode for this function. */
8482 if (GET_MODE (result
) == value_mode
)
8484 else if (target
!= 0)
8486 convert_move (target
, result
, 0);
8490 return convert_to_mode (value_mode
, result
, 0);
8493 case BUILT_IN_STRCPY
:
8494 /* If not optimizing, call the library function. */
8495 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8499 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8500 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8501 || TREE_CHAIN (arglist
) == 0
8502 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8506 tree len
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
8511 len
= size_binop (PLUS_EXPR
, len
, integer_one_node
);
8513 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8517 case BUILT_IN_MEMCPY
:
8518 /* If not optimizing, call the library function. */
8519 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8523 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8524 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8525 || TREE_CHAIN (arglist
) == 0
8526 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8527 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8528 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8532 tree dest
= TREE_VALUE (arglist
);
8533 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
8534 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8538 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8540 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8541 rtx dest_rtx
, dest_mem
, src_mem
;
8543 /* If either SRC or DEST is not a pointer type, don't do
8544 this operation in-line. */
8545 if (src_align
== 0 || dest_align
== 0)
8547 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCPY
)
8548 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8552 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8553 dest_mem
= gen_rtx (MEM
, BLKmode
,
8554 memory_address (BLKmode
, dest_rtx
));
8555 /* There could be a void* cast on top of the object. */
8556 while (TREE_CODE (dest
) == NOP_EXPR
)
8557 dest
= TREE_OPERAND (dest
, 0);
8558 type
= TREE_TYPE (TREE_TYPE (dest
));
8559 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8560 src_mem
= gen_rtx (MEM
, BLKmode
,
8561 memory_address (BLKmode
,
8562 expand_expr (src
, NULL_RTX
,
8565 /* There could be a void* cast on top of the object. */
8566 while (TREE_CODE (src
) == NOP_EXPR
)
8567 src
= TREE_OPERAND (src
, 0);
8568 type
= TREE_TYPE (TREE_TYPE (src
));
8569 MEM_IN_STRUCT_P (src_mem
) = AGGREGATE_TYPE_P (type
);
8571 /* Copy word part most expediently. */
8572 emit_block_move (dest_mem
, src_mem
,
8573 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8574 MIN (src_align
, dest_align
));
8575 return force_operand (dest_rtx
, NULL_RTX
);
8578 case BUILT_IN_MEMSET
:
8579 /* If not optimizing, call the library function. */
8580 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8584 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8585 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8586 || TREE_CHAIN (arglist
) == 0
8587 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
))))
8589 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8591 != (TREE_CODE (TREE_TYPE
8593 (TREE_CHAIN (TREE_CHAIN (arglist
))))))))
8597 tree dest
= TREE_VALUE (arglist
);
8598 tree val
= TREE_VALUE (TREE_CHAIN (arglist
));
8599 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8603 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8604 rtx dest_rtx
, dest_mem
;
8606 /* If DEST is not a pointer type, don't do this
8607 operation in-line. */
8608 if (dest_align
== 0)
8611 /* If VAL is not 0, don't do this operation in-line. */
8612 if (expand_expr (val
, NULL_RTX
, VOIDmode
, 0) != const0_rtx
)
8615 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8616 dest_mem
= gen_rtx (MEM
, BLKmode
,
8617 memory_address (BLKmode
, dest_rtx
));
8618 /* There could be a void* cast on top of the object. */
8619 while (TREE_CODE (dest
) == NOP_EXPR
)
8620 dest
= TREE_OPERAND (dest
, 0);
8621 type
= TREE_TYPE (TREE_TYPE (dest
));
8622 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8624 clear_storage (dest_mem
, expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8627 return force_operand (dest_rtx
, NULL_RTX
);
8630 /* These comparison functions need an instruction that returns an actual
8631 index. An ordinary compare that just sets the condition codes
8633 #ifdef HAVE_cmpstrsi
8634 case BUILT_IN_STRCMP
:
8635 /* If not optimizing, call the library function. */
8636 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8640 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8641 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8642 || TREE_CHAIN (arglist
) == 0
8643 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8645 else if (!HAVE_cmpstrsi
)
8648 tree arg1
= TREE_VALUE (arglist
);
8649 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8653 len
= c_strlen (arg1
);
8655 len
= size_binop (PLUS_EXPR
, integer_one_node
, len
);
8656 len2
= c_strlen (arg2
);
8658 len2
= size_binop (PLUS_EXPR
, integer_one_node
, len2
);
8660 /* If we don't have a constant length for the first, use the length
8661 of the second, if we know it. We don't require a constant for
8662 this case; some cost analysis could be done if both are available
8663 but neither is constant. For now, assume they're equally cheap.
8665 If both strings have constant lengths, use the smaller. This
8666 could arise if optimization results in strcpy being called with
8667 two fixed strings, or if the code was machine-generated. We should
8668 add some code to the `memcmp' handler below to deal with such
8669 situations, someday. */
8670 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
8677 else if (len2
&& TREE_CODE (len2
) == INTEGER_CST
)
8679 if (tree_int_cst_lt (len2
, len
))
8683 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8687 case BUILT_IN_MEMCMP
:
8688 /* If not optimizing, call the library function. */
8689 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8693 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8694 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8695 || TREE_CHAIN (arglist
) == 0
8696 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8697 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8698 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8700 else if (!HAVE_cmpstrsi
)
8703 tree arg1
= TREE_VALUE (arglist
);
8704 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8705 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8709 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8711 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8712 enum machine_mode insn_mode
8713 = insn_operand_mode
[(int) CODE_FOR_cmpstrsi
][0];
8715 /* If we don't have POINTER_TYPE, call the function. */
8716 if (arg1_align
== 0 || arg2_align
== 0)
8718 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCMP
)
8719 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8723 /* Make a place to write the result of the instruction. */
8726 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
8727 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8728 result
= gen_reg_rtx (insn_mode
);
8730 emit_insn (gen_cmpstrsi (result
,
8731 gen_rtx (MEM
, BLKmode
,
8732 expand_expr (arg1
, NULL_RTX
,
8735 gen_rtx (MEM
, BLKmode
,
8736 expand_expr (arg2
, NULL_RTX
,
8739 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8740 GEN_INT (MIN (arg1_align
, arg2_align
))));
8742 /* Return the value in the proper mode for this function. */
8743 mode
= TYPE_MODE (TREE_TYPE (exp
));
8744 if (GET_MODE (result
) == mode
)
8746 else if (target
!= 0)
8748 convert_move (target
, result
, 0);
8752 return convert_to_mode (mode
, result
, 0);
8755 case BUILT_IN_STRCMP
:
8756 case BUILT_IN_MEMCMP
:
8760 /* __builtin_setjmp is passed a pointer to an array of five words
8761 (not all will be used on all machines). It operates similarly to
8762 the C library function of the same name, but is more efficient.
8763 Much of the code below (and for longjmp) is copied from the handling
8766 NOTE: This is intended for use by GNAT and will only work in
8767 the method used by it. This code will likely NOT survive to
8768 the GCC 2.8.0 release. */
8769 case BUILT_IN_SETJMP
:
8771 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8775 rtx buf_addr
= expand_expr (TREE_VALUE (arglist
), subtarget
,
8777 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
8778 enum machine_mode sa_mode
= Pmode
;
8780 int old_inhibit_defer_pop
= inhibit_defer_pop
;
8781 int return_pops
= RETURN_POPS_ARGS (get_identifier ("__dummy"),
8782 get_identifier ("__dummy"), 0);
8784 CUMULATIVE_ARGS args_so_far
;
8787 #ifdef POINTERS_EXTEND_UNSIGNED
8788 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
8791 buf_addr
= force_reg (Pmode
, buf_addr
);
8793 if (target
== 0 || GET_CODE (target
) != REG
8794 || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
8795 target
= gen_reg_rtx (value_mode
);
8799 CONST_CALL_P (emit_note (NULL_PTR
, NOTE_INSN_SETJMP
)) = 1;
8800 current_function_calls_setjmp
= 1;
8802 /* We store the frame pointer and the address of lab1 in the buffer
8803 and use the rest of it for the stack save area, which is
8804 machine-dependent. */
8805 emit_move_insn (gen_rtx (MEM
, Pmode
, buf_addr
),
8806 virtual_stack_vars_rtx
);
8808 (validize_mem (gen_rtx (MEM
, Pmode
,
8809 plus_constant (buf_addr
,
8810 GET_MODE_SIZE (Pmode
)))),
8811 gen_rtx (LABEL_REF
, Pmode
, lab1
));
8813 #ifdef HAVE_save_stack_nonlocal
8814 if (HAVE_save_stack_nonlocal
)
8815 sa_mode
= insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0];
8818 stack_save
= gen_rtx (MEM
, sa_mode
,
8819 plus_constant (buf_addr
,
8820 2 * GET_MODE_SIZE (Pmode
)));
8821 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
8825 emit_insn (gen_setjmp ());
8828 /* Set TARGET to zero and branch around the other case. */
8829 emit_move_insn (target
, const0_rtx
);
8830 emit_jump_insn (gen_jump (lab2
));
8834 /* Note that setjmp clobbers FP when we get here, so we have to
8835 make sure it's marked as used by this function. */
8836 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
8838 /* Mark the static chain as clobbered here so life information
8839 doesn't get messed up for it. */
8840 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, static_chain_rtx
));
8842 /* Now put in the code to restore the frame pointer, and argument
8843 pointer, if needed. The code below is from expand_end_bindings
8844 in stmt.c; see detailed documentation there. */
8845 #ifdef HAVE_nonlocal_goto
8846 if (! HAVE_nonlocal_goto
)
8848 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
8850 current_function_has_nonlocal_goto
= 1;
8852 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8853 if (fixed_regs
[ARG_POINTER_REGNUM
])
8855 #ifdef ELIMINABLE_REGS
8856 static struct elims
{int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
8858 for (i
= 0; i
< sizeof elim_regs
/ sizeof elim_regs
[0]; i
++)
8859 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
8860 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
8863 if (i
== sizeof elim_regs
/ sizeof elim_regs
[0])
8866 /* Now restore our arg pointer from the address at which it
8867 was saved in our stack frame.
8868 If there hasn't be space allocated for it yet, make
8870 if (arg_pointer_save_area
== 0)
8871 arg_pointer_save_area
8872 = assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
8873 emit_move_insn (virtual_incoming_args_rtx
,
8874 copy_to_reg (arg_pointer_save_area
));
8879 #ifdef HAVE_nonlocal_goto_receiver
8880 if (HAVE_nonlocal_goto_receiver
)
8881 emit_insn (gen_nonlocal_goto_receiver ());
8883 /* The static chain pointer contains the address of dummy function.
8884 We need to call it here to handle some PIC cases of restoring
8885 a global pointer. Then return 1. */
8886 op0
= copy_to_mode_reg (Pmode
, static_chain_rtx
);
8888 /* We can't actually call emit_library_call here, so do everything
8889 it does, which isn't much for a libfunc with no args. */
8890 op0
= memory_address (FUNCTION_MODE
, op0
);
8892 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
,
8893 gen_rtx (SYMBOL_REF
, Pmode
, "__dummy"), 1);
8894 next_arg_reg
= FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1);
8896 #ifndef ACCUMULATE_OUTGOING_ARGS
8897 #ifdef HAVE_call_pop
8899 emit_call_insn (gen_call_pop (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8900 const0_rtx
, next_arg_reg
,
8901 GEN_INT (return_pops
)));
8908 emit_call_insn (gen_call (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8909 const0_rtx
, next_arg_reg
, const0_rtx
));
8914 emit_move_insn (target
, const1_rtx
);
8919 /* __builtin_longjmp is passed a pointer to an array of five words
8920 and a value, which is a dummy. It's similar to the C library longjmp
8921 function but works with __builtin_setjmp above. */
8922 case BUILT_IN_LONGJMP
:
8923 if (arglist
== 0 || TREE_CHAIN (arglist
) == 0
8924 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8928 tree dummy_id
= get_identifier ("__dummy");
8929 tree dummy_type
= build_function_type (void_type_node
, NULL_TREE
);
8930 tree dummy_decl
= build_decl (FUNCTION_DECL
, dummy_id
, dummy_type
);
8931 #ifdef POINTERS_EXTEND_UNSIGNED
8934 convert_memory_address
8936 expand_expr (TREE_VALUE (arglist
),
8937 NULL_RTX
, VOIDmode
, 0)));
8940 = force_reg (Pmode
, expand_expr (TREE_VALUE (arglist
),
8944 rtx fp
= gen_rtx (MEM
, Pmode
, buf_addr
);
8945 rtx lab
= gen_rtx (MEM
, Pmode
,
8946 plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
)));
8947 enum machine_mode sa_mode
8948 #ifdef HAVE_save_stack_nonlocal
8949 = (HAVE_save_stack_nonlocal
8950 ? insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0]
8955 rtx stack
= gen_rtx (MEM
, sa_mode
,
8956 plus_constant (buf_addr
,
8957 2 * GET_MODE_SIZE (Pmode
)));
8959 DECL_EXTERNAL (dummy_decl
) = 1;
8960 TREE_PUBLIC (dummy_decl
) = 1;
8961 make_decl_rtl (dummy_decl
, NULL_PTR
, 1);
8963 /* Expand the second expression just for side-effects. */
8964 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)),
8965 const0_rtx
, VOIDmode
, 0);
8967 assemble_external (dummy_decl
);
8969 /* Pick up FP, label, and SP from the block and jump. This code is
8970 from expand_goto in stmt.c; see there for detailed comments. */
8971 #if HAVE_nonlocal_goto
8972 if (HAVE_nonlocal_goto
)
8973 emit_insn (gen_nonlocal_goto (fp
, lab
, stack
,
8974 XEXP (DECL_RTL (dummy_decl
), 0)));
8978 lab
= copy_to_reg (lab
);
8979 emit_move_insn (hard_frame_pointer_rtx
, fp
);
8980 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
8982 /* Put in the static chain register the address of the dummy
8984 emit_move_insn (static_chain_rtx
, XEXP (DECL_RTL (dummy_decl
), 0));
8985 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
8986 emit_insn (gen_rtx (USE
, VOIDmode
, stack_pointer_rtx
));
8987 emit_insn (gen_rtx (USE
, VOIDmode
, static_chain_rtx
));
8988 emit_indirect_jump (lab
);
8994 default: /* just do library call, if unknown builtin */
8995 error ("built-in function `%s' not currently supported",
8996 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
8999 /* The switch statement above can drop through to cause the function
9000 to be called normally. */
9002 return expand_call (exp
, target
, ignore
);
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9027 /* Return the offset of register REGNO into the block returned by
9028 __builtin_apply_args. This is not declared static, since it is
9029 needed in objc-act.c. */
9032 apply_args_register_offset (regno
)
9037 /* Arguments are always put in outgoing registers (in the argument
9038 block) if such make sense. */
9039 #ifdef OUTGOING_REGNO
9040 regno
= OUTGOING_REGNO(regno
);
9042 return apply_args_reg_offset
[regno
];
9045 /* Return the size required for the block returned by __builtin_apply_args,
9046 and initialize apply_args_mode. */
9051 static int size
= -1;
9053 enum machine_mode mode
;
9055 /* The values computed by this function never change. */
9058 /* The first value is the incoming arg-pointer. */
9059 size
= GET_MODE_SIZE (Pmode
);
9061 /* The second value is the structure value address unless this is
9062 passed as an "invisible" first argument. */
9063 if (struct_value_rtx
)
9064 size
+= GET_MODE_SIZE (Pmode
);
9066 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9067 if (FUNCTION_ARG_REGNO_P (regno
))
9069 /* Search for the proper mode for copying this register's
9070 value. I'm not sure this is right, but it works so far. */
9071 enum machine_mode best_mode
= VOIDmode
;
9073 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9075 mode
= GET_MODE_WIDER_MODE (mode
))
9076 if (HARD_REGNO_MODE_OK (regno
, mode
)
9077 && HARD_REGNO_NREGS (regno
, mode
) == 1)
9080 if (best_mode
== VOIDmode
)
9081 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9083 mode
= GET_MODE_WIDER_MODE (mode
))
9084 if (HARD_REGNO_MODE_OK (regno
, mode
)
9085 && (mov_optab
->handlers
[(int) mode
].insn_code
9086 != CODE_FOR_nothing
))
9090 if (mode
== VOIDmode
)
9093 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9094 if (size
% align
!= 0)
9095 size
= CEIL (size
, align
) * align
;
9096 apply_args_reg_offset
[regno
] = size
;
9097 size
+= GET_MODE_SIZE (mode
);
9098 apply_args_mode
[regno
] = mode
;
9102 apply_args_mode
[regno
] = VOIDmode
;
9103 apply_args_reg_offset
[regno
] = 0;
9109 /* Return the size required for the block returned by __builtin_apply,
9110 and initialize apply_result_mode. */
9113 apply_result_size ()
9115 static int size
= -1;
9117 enum machine_mode mode
;
9119 /* The values computed by this function never change. */
9124 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9125 if (FUNCTION_VALUE_REGNO_P (regno
))
9127 /* Search for the proper mode for copying this register's
9128 value. I'm not sure this is right, but it works so far. */
9129 enum machine_mode best_mode
= VOIDmode
;
9131 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9133 mode
= GET_MODE_WIDER_MODE (mode
))
9134 if (HARD_REGNO_MODE_OK (regno
, mode
))
9137 if (best_mode
== VOIDmode
)
9138 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9140 mode
= GET_MODE_WIDER_MODE (mode
))
9141 if (HARD_REGNO_MODE_OK (regno
, mode
)
9142 && (mov_optab
->handlers
[(int) mode
].insn_code
9143 != CODE_FOR_nothing
))
9147 if (mode
== VOIDmode
)
9150 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9151 if (size
% align
!= 0)
9152 size
= CEIL (size
, align
) * align
;
9153 size
+= GET_MODE_SIZE (mode
);
9154 apply_result_mode
[regno
] = mode
;
9157 apply_result_mode
[regno
] = VOIDmode
;
9159 /* Allow targets that use untyped_call and untyped_return to override
9160 the size so that machine-specific information can be stored here. */
9161 #ifdef APPLY_RESULT_SIZE
9162 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   SAVEP nonzero means build (set mem reg) pairs (save registers into the
   block); zero means build (set reg mem) pairs (restore them).  RESULT is
   the MEM for the result block.  Returns a PARALLEL of the SETs.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        /* Keep each register's slot aligned as apply_result_size did.  */
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        /* When restoring, map the outgoing register number back to the
           register it arrives in.  */
        reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx (SET, VOIDmode, mem, reg)
                            : gen_rtx (SET, VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
9202 /* Save the state required to perform an untyped call with the same
9203 arguments as were passed to the current function. */
9206 expand_builtin_apply_args ()
9209 int size
, align
, regno
;
9210 enum machine_mode mode
;
9212 /* Create a block where the arg-pointer, structure value address,
9213 and argument registers can be saved. */
9214 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
9216 /* Walk past the arg-pointer and structure value address. */
9217 size
= GET_MODE_SIZE (Pmode
);
9218 if (struct_value_rtx
)
9219 size
+= GET_MODE_SIZE (Pmode
);
9221 /* Save each register used in calling a function to the block. */
9222 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9223 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
9227 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9228 if (size
% align
!= 0)
9229 size
= CEIL (size
, align
) * align
;
9231 tem
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9234 /* For reg-stack.c's stack register household.
9235 Compare with a similar piece of code in function.c. */
9237 emit_insn (gen_rtx (USE
, mode
, tem
));
9240 emit_move_insn (change_address (registers
, mode
,
9241 plus_constant (XEXP (registers
, 0),
9244 size
+= GET_MODE_SIZE (mode
);
9247 /* Save the arg pointer to the block. */
9248 emit_move_insn (change_address (registers
, Pmode
, XEXP (registers
, 0)),
9249 copy_to_reg (virtual_incoming_args_rtx
));
9250 size
= GET_MODE_SIZE (Pmode
);
9252 /* Save the structure value address unless this is passed as an
9253 "invisible" first argument. */
9254 if (struct_value_incoming_rtx
)
9256 emit_move_insn (change_address (registers
, Pmode
,
9257 plus_constant (XEXP (registers
, 0),
9259 copy_to_reg (struct_value_incoming_rtx
));
9260 size
+= GET_MODE_SIZE (Pmode
);
9263 /* Return the address of the block. */
9264 return copy_addr_to_reg (XEXP (registers
, 0));
9267 /* Perform an untyped call and save the state required to perform an
9268 untyped return of whatever value was returned by the given function. */
9271 expand_builtin_apply (function
, arguments
, argsize
)
9272 rtx function
, arguments
, argsize
;
9274 int size
, align
, regno
;
9275 enum machine_mode mode
;
9276 rtx incoming_args
, result
, reg
, dest
, call_insn
;
9277 rtx old_stack_level
= 0;
9278 rtx call_fusage
= 0;
9280 /* Create a block where the return registers can be saved. */
9281 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
9283 /* ??? The argsize value should be adjusted here. */
9285 /* Fetch the arg pointer from the ARGUMENTS block. */
9286 incoming_args
= gen_reg_rtx (Pmode
);
9287 emit_move_insn (incoming_args
,
9288 gen_rtx (MEM
, Pmode
, arguments
));
9289 #ifndef STACK_GROWS_DOWNWARD
9290 incoming_args
= expand_binop (Pmode
, sub_optab
, incoming_args
, argsize
,
9291 incoming_args
, 0, OPTAB_LIB_WIDEN
);
9294 /* Perform postincrements before actually calling the function. */
9297 /* Push a new argument block and copy the arguments. */
9298 do_pending_stack_adjust ();
9299 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
9301 /* Push a block of memory onto the stack to store the memory arguments.
9302 Save the address in a register, and copy the memory arguments. ??? I
9303 haven't figured out how the calling convention macros effect this,
9304 but it's likely that the source and/or destination addresses in
9305 the block copy will need updating in machine specific ways. */
9306 dest
= copy_addr_to_reg (push_block (argsize
, 0, 0));
9307 emit_block_move (gen_rtx (MEM
, BLKmode
, dest
),
9308 gen_rtx (MEM
, BLKmode
, incoming_args
),
9310 PARM_BOUNDARY
/ BITS_PER_UNIT
);
9312 /* Refer to the argument block. */
9314 arguments
= gen_rtx (MEM
, BLKmode
, arguments
);
9316 /* Walk past the arg-pointer and structure value address. */
9317 size
= GET_MODE_SIZE (Pmode
);
9318 if (struct_value_rtx
)
9319 size
+= GET_MODE_SIZE (Pmode
);
9321 /* Restore each of the registers previously saved. Make USE insns
9322 for each of these registers for use in making the call. */
9323 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9324 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
9326 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9327 if (size
% align
!= 0)
9328 size
= CEIL (size
, align
) * align
;
9329 reg
= gen_rtx (REG
, mode
, regno
);
9330 emit_move_insn (reg
,
9331 change_address (arguments
, mode
,
9332 plus_constant (XEXP (arguments
, 0),
9335 use_reg (&call_fusage
, reg
);
9336 size
+= GET_MODE_SIZE (mode
);
9339 /* Restore the structure value address unless this is passed as an
9340 "invisible" first argument. */
9341 size
= GET_MODE_SIZE (Pmode
);
9342 if (struct_value_rtx
)
9344 rtx value
= gen_reg_rtx (Pmode
);
9345 emit_move_insn (value
,
9346 change_address (arguments
, Pmode
,
9347 plus_constant (XEXP (arguments
, 0),
9349 emit_move_insn (struct_value_rtx
, value
);
9350 if (GET_CODE (struct_value_rtx
) == REG
)
9351 use_reg (&call_fusage
, struct_value_rtx
);
9352 size
+= GET_MODE_SIZE (Pmode
);
9355 /* All arguments and registers used for the call are set up by now! */
9356 function
= prepare_call_address (function
, NULL_TREE
, &call_fusage
, 0);
9358 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9359 and we don't want to load it into a register as an optimization,
9360 because prepare_call_address already did it if it should be done. */
9361 if (GET_CODE (function
) != SYMBOL_REF
)
9362 function
= memory_address (FUNCTION_MODE
, function
);
9364 /* Generate the actual call instruction and save the return value. */
9365 #ifdef HAVE_untyped_call
9366 if (HAVE_untyped_call
)
9367 emit_call_insn (gen_untyped_call (gen_rtx (MEM
, FUNCTION_MODE
, function
),
9368 result
, result_vector (1, result
)));
9371 #ifdef HAVE_call_value
9372 if (HAVE_call_value
)
9376 /* Locate the unique return register. It is not possible to
9377 express a call that sets more than one return register using
9378 call_value; use untyped_call for that. In fact, untyped_call
9379 only needs to save the return registers in the given block. */
9380 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9381 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
9384 abort (); /* HAVE_untyped_call required. */
9385 valreg
= gen_rtx (REG
, mode
, regno
);
9388 emit_call_insn (gen_call_value (valreg
,
9389 gen_rtx (MEM
, FUNCTION_MODE
, function
),
9390 const0_rtx
, NULL_RTX
, const0_rtx
));
9392 emit_move_insn (change_address (result
, GET_MODE (valreg
),
9400 /* Find the CALL insn we just emitted. */
9401 for (call_insn
= get_last_insn ();
9402 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
9403 call_insn
= PREV_INSN (call_insn
))
9409 /* Put the register usage information on the CALL. If there is already
9410 some usage information, put ours at the end. */
9411 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
9415 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
9416 link
= XEXP (link
, 1))
9419 XEXP (link
, 1) = call_fusage
;
9422 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
9424 /* Restore the stack. */
9425 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
9427 /* Return the address of the result block. */
9428 return copy_addr_to_reg (XEXP (result
, 0));
9431 /* Perform an untyped return. */
9434 expand_builtin_return (result
)
9437 int size
, align
, regno
;
9438 enum machine_mode mode
;
9440 rtx call_fusage
= 0;
9442 apply_result_size ();
9443 result
= gen_rtx (MEM
, BLKmode
, result
);
9445 #ifdef HAVE_untyped_return
9446 if (HAVE_untyped_return
)
9448 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
9454 /* Restore the return value and note that each value is used. */
9456 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9457 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
9459 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9460 if (size
% align
!= 0)
9461 size
= CEIL (size
, align
) * align
;
9462 reg
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9463 emit_move_insn (reg
,
9464 change_address (result
, mode
,
9465 plus_constant (XEXP (result
, 0),
9468 push_to_sequence (call_fusage
);
9469 emit_insn (gen_rtx (USE
, VOIDmode
, reg
));
9470 call_fusage
= get_insns ();
9472 size
+= GET_MODE_SIZE (mode
);
9475 /* Put the USE insns before the return. */
9476 emit_insns (call_fusage
);
9478 /* Return whatever values was restored by jumping directly to the end
9480 expand_null_return ();
9483 /* Expand code for a post- or pre- increment or decrement
9484 and return the RTX for the result.
9485 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9488 expand_increment (exp
, post
, ignore
)
9492 register rtx op0
, op1
;
9493 register rtx temp
, value
;
9494 register tree incremented
= TREE_OPERAND (exp
, 0);
9495 optab this_optab
= add_optab
;
9497 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9498 int op0_is_copy
= 0;
9499 int single_insn
= 0;
9500 /* 1 means we can't store into OP0 directly,
9501 because it is a subreg narrower than a word,
9502 and we don't dare clobber the rest of the word. */
9505 if (output_bytecode
)
9507 bc_expand_expr (exp
);
9511 /* Stabilize any component ref that might need to be
9512 evaluated more than once below. */
9514 || TREE_CODE (incremented
) == BIT_FIELD_REF
9515 || (TREE_CODE (incremented
) == COMPONENT_REF
9516 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9517 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9518 incremented
= stabilize_reference (incremented
);
9519 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9520 ones into save exprs so that they don't accidentally get evaluated
9521 more than once by the code below. */
9522 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9523 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9524 incremented
= save_expr (incremented
);
9526 /* Compute the operands as RTX.
9527 Note whether OP0 is the actual lvalue or a copy of it:
9528 I believe it is a copy iff it is a register or subreg
9529 and insns were generated in computing it. */
9531 temp
= get_last_insn ();
9532 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9534 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9535 in place but instead must do sign- or zero-extension during assignment,
9536 so we copy it into a new register and let the code below use it as
9539 Note that we can safely modify this SUBREG since it is know not to be
9540 shared (it was made by the expand_expr call above). */
9542 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9545 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9549 else if (GET_CODE (op0
) == SUBREG
9550 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9552 /* We cannot increment this SUBREG in place. If we are
9553 post-incrementing, get a copy of the old value. Otherwise,
9554 just mark that we cannot increment in place. */
9556 op0
= copy_to_reg (op0
);
9561 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9562 && temp
!= get_last_insn ());
9563 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9565 /* Decide whether incrementing or decrementing. */
9566 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9567 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9568 this_optab
= sub_optab
;
9570 /* Convert decrement by a constant into a negative increment. */
9571 if (this_optab
== sub_optab
9572 && GET_CODE (op1
) == CONST_INT
)
9574 op1
= GEN_INT (- INTVAL (op1
));
9575 this_optab
= add_optab
;
9578 /* For a preincrement, see if we can do this with a single instruction. */
9581 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9582 if (icode
!= (int) CODE_FOR_nothing
9583 /* Make sure that OP0 is valid for operands 0 and 1
9584 of the insn we want to queue. */
9585 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9586 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
)
9587 && (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9591 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9592 then we cannot just increment OP0. We must therefore contrive to
9593 increment the original value. Then, for postincrement, we can return
9594 OP0 since it is a copy of the old value. For preincrement, expand here
9595 unless we can do it with a single insn.
9597 Likewise if storing directly into OP0 would clobber high bits
9598 we need to preserve (bad_subreg). */
9599 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9601 /* This is the easiest way to increment the value wherever it is.
9602 Problems with multiple evaluation of INCREMENTED are prevented
9603 because either (1) it is a component_ref or preincrement,
9604 in which case it was stabilized above, or (2) it is an array_ref
9605 with constant index in an array in a register, which is
9606 safe to reevaluate. */
9607 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9608 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9609 ? MINUS_EXPR
: PLUS_EXPR
),
9612 TREE_OPERAND (exp
, 1));
9614 while (TREE_CODE (incremented
) == NOP_EXPR
9615 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9617 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9618 incremented
= TREE_OPERAND (incremented
, 0);
9621 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9622 return post
? op0
: temp
;
9627 /* We have a true reference to the value in OP0.
9628 If there is an insn to add or subtract in this mode, queue it.
9629 Queueing the increment insn avoids the register shuffling
9630 that often results if we must increment now and first save
9631 the old value for subsequent use. */
9633 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9634 op0
= stabilize (op0
);
9637 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9638 if (icode
!= (int) CODE_FOR_nothing
9639 /* Make sure that OP0 is valid for operands 0 and 1
9640 of the insn we want to queue. */
9641 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9642 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
))
9644 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9645 op1
= force_reg (mode
, op1
);
9647 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9649 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9651 rtx addr
= force_reg (Pmode
, XEXP (op0
, 0));
9654 op0
= change_address (op0
, VOIDmode
, addr
);
9655 temp
= force_reg (GET_MODE (op0
), op0
);
9656 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9657 op1
= force_reg (mode
, op1
);
9659 /* The increment queue is LIFO, thus we have to `queue'
9660 the instructions in reverse order. */
9661 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9662 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9667 /* Preincrement, or we can't increment with one simple insn. */
9669 /* Save a copy of the value before inc or dec, to return it later. */
9670 temp
= value
= copy_to_reg (op0
);
9672 /* Arrange to return the incremented value. */
9673 /* Copy the rtx because expand_binop will protect from the queue,
9674 and the results of that would be invalid for us to return
9675 if our caller does emit_queue before using our result. */
9676 temp
= copy_rtx (value
= op0
);
9678 /* Increment however we can. */
9679 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9680 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9681 /* Make sure the value is stored into OP0. */
9683 emit_move_insn (op0
, op1
);
9688 /* Expand all function calls contained within EXP, innermost ones first.
9689 But don't look within expressions that have sequence points.
9690 For each CALL_EXPR, record the rtx for its value
9691 in the CALL_EXPR_RTL field. */
9694 preexpand_calls (exp
)
9697 register int nops
, i
;
9698 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
9700 if (! do_preexpand_calls
)
9703 /* Only expressions and references can contain calls. */
9705 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
9708 switch (TREE_CODE (exp
))
9711 /* Do nothing if already expanded. */
9712 if (CALL_EXPR_RTL (exp
) != 0
9713 /* Do nothing if the call returns a variable-sized object. */
9714 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp
))) != INTEGER_CST
9715 /* Do nothing to built-in functions. */
9716 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9717 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9719 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
9722 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9727 case TRUTH_ANDIF_EXPR
:
9728 case TRUTH_ORIF_EXPR
:
9729 /* If we find one of these, then we can be sure
9730 the adjust will be done for it (since it makes jumps).
9731 Do it now, so that if this is inside an argument
9732 of a function, we don't get the stack adjustment
9733 after some other args have already been pushed. */
9734 do_pending_stack_adjust ();
9739 case WITH_CLEANUP_EXPR
:
9740 case CLEANUP_POINT_EXPR
:
9744 if (SAVE_EXPR_RTL (exp
) != 0)
9748 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
9749 for (i
= 0; i
< nops
; i
++)
9750 if (TREE_OPERAND (exp
, i
) != 0)
9752 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9753 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
9755 preexpand_calls (TREE_OPERAND (exp
, i
));
9759 /* At the start of a function, record that we have no previously-pushed
9760 arguments waiting to be popped. */
9763 init_pending_stack_adjust ()
9765 pending_stack_adjust
= 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Only done when the target says the exit code ignores the stack pointer
   and the function will not be inlined (inlining would splice this body
   into a caller where the adjustment still matters).  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
9783 /* Pop any previously-pushed arguments that have not been popped yet. */
9786 do_pending_stack_adjust ()
9788 if (inhibit_defer_pop
== 0)
9790 if (pending_stack_adjust
!= 0)
9791 adjust_stack (GEN_INT (pending_stack_adjust
));
9792 pending_stack_adjust
= 0;
9796 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9797 Returns the cleanups to be performed. */
9800 defer_cleanups_to (old_cleanups
)
9803 tree new_cleanups
= NULL_TREE
;
9804 tree cleanups
= cleanups_this_call
;
9805 tree last
= NULL_TREE
;
9807 while (cleanups_this_call
!= old_cleanups
)
9809 expand_eh_region_end (TREE_VALUE (cleanups_this_call
));
9810 last
= cleanups_this_call
;
9811 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
9816 /* Remove the list from the chain of cleanups. */
9817 TREE_CHAIN (last
) = NULL_TREE
;
9819 /* reverse them so that we can build them in the right order. */
9820 cleanups
= nreverse (cleanups
);
9822 /* All cleanups must be on the function_obstack. */
9823 push_obstacks_nochange ();
9824 resume_temporary_allocation ();
9829 new_cleanups
= build (COMPOUND_EXPR
, TREE_TYPE (new_cleanups
),
9830 TREE_VALUE (cleanups
), new_cleanups
);
9832 new_cleanups
= TREE_VALUE (cleanups
);
9834 cleanups
= TREE_CHAIN (cleanups
);
9840 return new_cleanups
;
9843 /* Expand all cleanups up to OLD_CLEANUPS.
9844 Needed here, and also for language-dependent calls. */
9847 expand_cleanups_to (old_cleanups
)
9850 while (cleanups_this_call
!= old_cleanups
)
9852 expand_eh_region_end (TREE_VALUE (cleanups_this_call
));
9853 expand_expr (TREE_VALUE (cleanups_this_call
), const0_rtx
, VOIDmode
, 0);
9854 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
9858 /* Expand conditional expressions. */
9860 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9861 LABEL is an rtx of code CODE_LABEL, in this function and all the
9865 jumpifnot (exp
, label
)
9869 do_jump (exp
, label
, NULL_RTX
);
9872 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9879 do_jump (exp
, NULL_RTX
, label
);
9882 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9883 the result is zero, or IF_TRUE_LABEL if the result is one.
9884 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9885 meaning fall through in that case.
9887 do_jump always does any pending stack adjust except when it does not
9888 actually perform a jump. An example where there is no jump
9889 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9891 This function is responsible for optimizing cases such as
9892 &&, || and comparison operators in EXP. */
9895 do_jump (exp
, if_false_label
, if_true_label
)
9897 rtx if_false_label
, if_true_label
;
9899 register enum tree_code code
= TREE_CODE (exp
);
9900 /* Some cases need to create a label to jump to
9901 in order to properly fall through.
9902 These cases set DROP_THROUGH_LABEL nonzero. */
9903 rtx drop_through_label
= 0;
9908 enum machine_mode mode
;
9918 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9924 /* This is not true with #pragma weak */
9926 /* The address of something can never be zero. */
9928 emit_jump (if_true_label
);
9933 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9934 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9935 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9938 /* If we are narrowing the operand, we have to do the compare in the
9940 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9941 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9943 case NON_LVALUE_EXPR
:
9944 case REFERENCE_EXPR
:
9949 /* These cannot change zero->non-zero or vice versa. */
9950 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9954 /* This is never less insns than evaluating the PLUS_EXPR followed by
9955 a test and can be longer if the test is eliminated. */
9957 /* Reduce to minus. */
9958 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9959 TREE_OPERAND (exp
, 0),
9960 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9961 TREE_OPERAND (exp
, 1))));
9962 /* Process as MINUS. */
9966 /* Non-zero iff operands of minus differ. */
9967 comparison
= compare (build (NE_EXPR
, TREE_TYPE (exp
),
9968 TREE_OPERAND (exp
, 0),
9969 TREE_OPERAND (exp
, 1)),
9974 /* If we are AND'ing with a small constant, do this comparison in the
9975 smallest type that fits. If the machine doesn't have comparisons
9976 that small, it will be converted back to the wider comparison.
9977 This helps if we are testing the sign bit of a narrower object.
9978 combine can't do this for us because it can't know whether a
9979 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9981 if (! SLOW_BYTE_ACCESS
9982 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9983 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9984 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
9985 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9986 && (type
= type_for_mode (mode
, 1)) != 0
9987 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9988 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9989 != CODE_FOR_nothing
))
9991 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9996 case TRUTH_NOT_EXPR
:
9997 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10000 case TRUTH_ANDIF_EXPR
:
10003 tree cleanups
, old_cleanups
;
10005 if (if_false_label
== 0)
10006 if_false_label
= drop_through_label
= gen_label_rtx ();
10008 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
10009 seq1
= get_insns ();
10012 old_cleanups
= cleanups_this_call
;
10014 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10015 seq2
= get_insns ();
10016 cleanups
= defer_cleanups_to (old_cleanups
);
10021 rtx flag
= gen_reg_rtx (word_mode
);
10025 /* Flag cleanups as not needed. */
10026 emit_move_insn (flag
, const0_rtx
);
10029 /* Flag cleanups as needed. */
10030 emit_move_insn (flag
, const1_rtx
);
10033 /* All cleanups must be on the function_obstack. */
10034 push_obstacks_nochange ();
10035 resume_temporary_allocation ();
10037 /* convert flag, which is an rtx, into a tree. */
10038 cond
= make_node (RTL_EXPR
);
10039 TREE_TYPE (cond
) = integer_type_node
;
10040 RTL_EXPR_RTL (cond
) = flag
;
10041 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10042 cond
= save_expr (cond
);
10044 new_cleanups
= build (COND_EXPR
, void_type_node
,
10045 truthvalue_conversion (cond
),
10046 cleanups
, integer_zero_node
);
10047 new_cleanups
= fold (new_cleanups
);
10051 /* Now add in the conditionalized cleanups. */
10053 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10054 expand_eh_region_start ();
10064 case TRUTH_ORIF_EXPR
:
10067 tree cleanups
, old_cleanups
;
10069 if (if_true_label
== 0)
10070 if_true_label
= drop_through_label
= gen_label_rtx ();
10072 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
10073 seq1
= get_insns ();
10076 old_cleanups
= cleanups_this_call
;
10078 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10079 seq2
= get_insns ();
10080 cleanups
= defer_cleanups_to (old_cleanups
);
10085 rtx flag
= gen_reg_rtx (word_mode
);
10089 /* Flag cleanups as not needed. */
10090 emit_move_insn (flag
, const0_rtx
);
10093 /* Flag cleanups as needed. */
10094 emit_move_insn (flag
, const1_rtx
);
10097 /* All cleanups must be on the function_obstack. */
10098 push_obstacks_nochange ();
10099 resume_temporary_allocation ();
10101 /* convert flag, which is an rtx, into a tree. */
10102 cond
= make_node (RTL_EXPR
);
10103 TREE_TYPE (cond
) = integer_type_node
;
10104 RTL_EXPR_RTL (cond
) = flag
;
10105 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10106 cond
= save_expr (cond
);
10108 new_cleanups
= build (COND_EXPR
, void_type_node
,
10109 truthvalue_conversion (cond
),
10110 cleanups
, integer_zero_node
);
10111 new_cleanups
= fold (new_cleanups
);
10115 /* Now add in the conditionalized cleanups. */
10117 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10118 expand_eh_region_start ();
10128 case COMPOUND_EXPR
:
10129 push_temp_slots ();
10130 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
10131 preserve_temp_slots (NULL_RTX
);
10132 free_temp_slots ();
10135 do_pending_stack_adjust ();
10136 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10139 case COMPONENT_REF
:
10140 case BIT_FIELD_REF
:
10143 int bitsize
, bitpos
, unsignedp
;
10144 enum machine_mode mode
;
10149 /* Get description of this reference. We don't actually care
10150 about the underlying object here. */
10151 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
10152 &mode
, &unsignedp
, &volatilep
);
10154 type
= type_for_size (bitsize
, unsignedp
);
10155 if (! SLOW_BYTE_ACCESS
10156 && type
!= 0 && bitsize
>= 0
10157 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
10158 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
10159 != CODE_FOR_nothing
))
10161 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
10168 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10169 if (integer_onep (TREE_OPERAND (exp
, 1))
10170 && integer_zerop (TREE_OPERAND (exp
, 2)))
10171 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10173 else if (integer_zerop (TREE_OPERAND (exp
, 1))
10174 && integer_onep (TREE_OPERAND (exp
, 2)))
10175 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10179 register rtx label1
= gen_label_rtx ();
10180 drop_through_label
= gen_label_rtx ();
10181 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
10182 /* Now the THEN-expression. */
10183 do_jump (TREE_OPERAND (exp
, 1),
10184 if_false_label
? if_false_label
: drop_through_label
,
10185 if_true_label
? if_true_label
: drop_through_label
);
10186 /* In case the do_jump just above never jumps. */
10187 do_pending_stack_adjust ();
10188 emit_label (label1
);
10189 /* Now the ELSE-expression. */
10190 do_jump (TREE_OPERAND (exp
, 2),
10191 if_false_label
? if_false_label
: drop_through_label
,
10192 if_true_label
? if_true_label
: drop_through_label
);
10198 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10200 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10201 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10202 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10203 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10206 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
10207 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10208 fold (build1 (REALPART_EXPR
,
10209 TREE_TYPE (inner_type
),
10210 TREE_OPERAND (exp
, 0))),
10211 fold (build1 (REALPART_EXPR
,
10212 TREE_TYPE (inner_type
),
10213 TREE_OPERAND (exp
, 1))))),
10214 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10215 fold (build1 (IMAGPART_EXPR
,
10216 TREE_TYPE (inner_type
),
10217 TREE_OPERAND (exp
, 0))),
10218 fold (build1 (IMAGPART_EXPR
,
10219 TREE_TYPE (inner_type
),
10220 TREE_OPERAND (exp
, 1))))))),
10221 if_false_label
, if_true_label
);
10222 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10223 && !can_compare_p (TYPE_MODE (inner_type
)))
10224 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
10226 comparison
= compare (exp
, EQ
, EQ
);
10232 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10234 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10235 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10236 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10237 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10240 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
10241 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10242 fold (build1 (REALPART_EXPR
,
10243 TREE_TYPE (inner_type
),
10244 TREE_OPERAND (exp
, 0))),
10245 fold (build1 (REALPART_EXPR
,
10246 TREE_TYPE (inner_type
),
10247 TREE_OPERAND (exp
, 1))))),
10248 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10249 fold (build1 (IMAGPART_EXPR
,
10250 TREE_TYPE (inner_type
),
10251 TREE_OPERAND (exp
, 0))),
10252 fold (build1 (IMAGPART_EXPR
,
10253 TREE_TYPE (inner_type
),
10254 TREE_OPERAND (exp
, 1))))))),
10255 if_false_label
, if_true_label
);
10256 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10257 && !can_compare_p (TYPE_MODE (inner_type
)))
10258 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
10260 comparison
= compare (exp
, NE
, NE
);
10265 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10267 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10268 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
10270 comparison
= compare (exp
, LT
, LTU
);
10274 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10276 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10277 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
10279 comparison
= compare (exp
, LE
, LEU
);
10283 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10285 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10286 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
10288 comparison
= compare (exp
, GT
, GTU
);
10292 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10294 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10295 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
10297 comparison
= compare (exp
, GE
, GEU
);
10302 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
10304 /* This is not needed any more and causes poor code since it causes
10305 comparisons and tests from non-SI objects to have different code
10307 /* Copy to register to avoid generating bad insns by cse
10308 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10309 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
10310 temp
= copy_to_reg (temp
);
10312 do_pending_stack_adjust ();
10313 if (GET_CODE (temp
) == CONST_INT
)
10314 comparison
= (temp
== const0_rtx
? const0_rtx
: const_true_rtx
);
10315 else if (GET_CODE (temp
) == LABEL_REF
)
10316 comparison
= const_true_rtx
;
10317 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
10318 && !can_compare_p (GET_MODE (temp
)))
10319 /* Note swapping the labels gives us not-equal. */
10320 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
10321 else if (GET_MODE (temp
) != VOIDmode
)
10322 comparison
= compare_from_rtx (temp
, CONST0_RTX (GET_MODE (temp
)),
10323 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10324 GET_MODE (temp
), NULL_RTX
, 0);
10329 /* Do any postincrements in the expression that was tested. */
10332 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10333 straight into a conditional jump instruction as the jump condition.
10334 Otherwise, all the work has been done already. */
10336 if (comparison
== const_true_rtx
)
10339 emit_jump (if_true_label
);
10341 else if (comparison
== const0_rtx
)
10343 if (if_false_label
)
10344 emit_jump (if_false_label
);
10346 else if (comparison
)
10347 do_jump_for_compare (comparison
, if_false_label
, if_true_label
);
10349 if (drop_through_label
)
10351 /* If do_jump produces code that might be jumped around,
10352 do any stack adjusts from that code, before the place
10353 where control merges in. */
10354 do_pending_stack_adjust ();
10355 emit_label (drop_through_label
);
10359 /* Given a comparison expression EXP for values too wide to be compared
10360 with one insn, test the comparison and jump to the appropriate label.
10361 The code of EXP is ignored; we always test GT if SWAP is 0,
10362 and LT if SWAP is 1. */
10365 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
10368 rtx if_false_label
, if_true_label
;
10370 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
10371 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
10372 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10373 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10374 rtx drop_through_label
= 0;
10375 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10378 if (! if_true_label
|| ! if_false_label
)
10379 drop_through_label
= gen_label_rtx ();
10380 if (! if_true_label
)
10381 if_true_label
= drop_through_label
;
10382 if (! if_false_label
)
10383 if_false_label
= drop_through_label
;
10385 /* Compare a word at a time, high order first. */
10386 for (i
= 0; i
< nwords
; i
++)
10389 rtx op0_word
, op1_word
;
10391 if (WORDS_BIG_ENDIAN
)
10393 op0_word
= operand_subword_force (op0
, i
, mode
);
10394 op1_word
= operand_subword_force (op1
, i
, mode
);
10398 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10399 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10402 /* All but high-order word must be compared as unsigned. */
10403 comp
= compare_from_rtx (op0_word
, op1_word
,
10404 (unsignedp
|| i
> 0) ? GTU
: GT
,
10405 unsignedp
, word_mode
, NULL_RTX
, 0);
10406 if (comp
== const_true_rtx
)
10407 emit_jump (if_true_label
);
10408 else if (comp
!= const0_rtx
)
10409 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10411 /* Consider lower words only if these are equal. */
10412 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
10414 if (comp
== const_true_rtx
)
10415 emit_jump (if_false_label
);
10416 else if (comp
!= const0_rtx
)
10417 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
10420 if (if_false_label
)
10421 emit_jump (if_false_label
);
10422 if (drop_through_label
)
10423 emit_label (drop_through_label
);
10426 /* Compare OP0 with OP1, word at a time, in mode MODE.
10427 UNSIGNEDP says to do unsigned comparison.
10428 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10431 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
10432 enum machine_mode mode
;
10435 rtx if_false_label
, if_true_label
;
10437 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10438 rtx drop_through_label
= 0;
10441 if (! if_true_label
|| ! if_false_label
)
10442 drop_through_label
= gen_label_rtx ();
10443 if (! if_true_label
)
10444 if_true_label
= drop_through_label
;
10445 if (! if_false_label
)
10446 if_false_label
= drop_through_label
;
10448 /* Compare a word at a time, high order first. */
10449 for (i
= 0; i
< nwords
; i
++)
10452 rtx op0_word
, op1_word
;
10454 if (WORDS_BIG_ENDIAN
)
10456 op0_word
= operand_subword_force (op0
, i
, mode
);
10457 op1_word
= operand_subword_force (op1
, i
, mode
);
10461 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10462 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10465 /* All but high-order word must be compared as unsigned. */
10466 comp
= compare_from_rtx (op0_word
, op1_word
,
10467 (unsignedp
|| i
> 0) ? GTU
: GT
,
10468 unsignedp
, word_mode
, NULL_RTX
, 0);
10469 if (comp
== const_true_rtx
)
10470 emit_jump (if_true_label
);
10471 else if (comp
!= const0_rtx
)
10472 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10474 /* Consider lower words only if these are equal. */
10475 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
10477 if (comp
== const_true_rtx
)
10478 emit_jump (if_false_label
);
10479 else if (comp
!= const0_rtx
)
10480 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
10483 if (if_false_label
)
10484 emit_jump (if_false_label
);
10485 if (drop_through_label
)
10486 emit_label (drop_through_label
);
10489 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10490 with one insn, test the comparison and jump to the appropriate label. */
10493 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
10495 rtx if_false_label
, if_true_label
;
10497 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10498 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10499 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10500 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10502 rtx drop_through_label
= 0;
10504 if (! if_false_label
)
10505 drop_through_label
= if_false_label
= gen_label_rtx ();
10507 for (i
= 0; i
< nwords
; i
++)
10509 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
, mode
),
10510 operand_subword_force (op1
, i
, mode
),
10511 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10512 word_mode
, NULL_RTX
, 0);
10513 if (comp
== const_true_rtx
)
10514 emit_jump (if_false_label
);
10515 else if (comp
!= const0_rtx
)
10516 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
10520 emit_jump (if_true_label
);
10521 if (drop_through_label
)
10522 emit_label (drop_through_label
);
10525 /* Jump according to whether OP0 is 0.
10526 We assume that OP0 has an integer mode that is too wide
10527 for the available compare insns. */
10530 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
10532 rtx if_false_label
, if_true_label
;
10534 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
10536 rtx drop_through_label
= 0;
10538 if (! if_false_label
)
10539 drop_through_label
= if_false_label
= gen_label_rtx ();
10541 for (i
= 0; i
< nwords
; i
++)
10543 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
,
10545 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0);
10546 if (comp
== const_true_rtx
)
10547 emit_jump (if_false_label
);
10548 else if (comp
!= const0_rtx
)
10549 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
10553 emit_jump (if_true_label
);
10554 if (drop_through_label
)
10555 emit_label (drop_through_label
);
10558 /* Given a comparison expression in rtl form, output conditional branches to
10559 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10562 do_jump_for_compare (comparison
, if_false_label
, if_true_label
)
10563 rtx comparison
, if_false_label
, if_true_label
;
10567 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10568 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_true_label
));
10572 if (if_false_label
)
10573 emit_jump (if_false_label
);
10575 else if (if_false_label
)
10578 rtx prev
= get_last_insn ();
10581 /* Output the branch with the opposite condition. Then try to invert
10582 what is generated. If more than one insn is a branch, or if the
10583 branch is not the last insn written, abort. If we can't invert
10584 the branch, emit make a true label, redirect this jump to that,
10585 emit a jump to the false label and define the true label. */
10587 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10588 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)])(if_false_label
));
10592 /* Here we get the first insn that was just emitted. It used to be the
10593 case that, on some machines, emitting the branch would discard
10594 the previous compare insn and emit a replacement. This isn't
10595 done anymore, but abort if we see that PREV is deleted. */
10598 insn
= get_insns ();
10599 else if (INSN_DELETED_P (prev
))
10602 insn
= NEXT_INSN (prev
);
10604 for (; insn
; insn
= NEXT_INSN (insn
))
10605 if (GET_CODE (insn
) == JUMP_INSN
)
10612 if (branch
!= get_last_insn ())
10615 JUMP_LABEL (branch
) = if_false_label
;
10616 if (! invert_jump (branch
, if_false_label
))
10618 if_true_label
= gen_label_rtx ();
10619 redirect_jump (branch
, if_true_label
);
10620 emit_jump (if_false_label
);
10621 emit_label (if_true_label
);
10626 /* Generate code for a comparison expression EXP
10627 (including code to compute the values to be compared)
10628 and set (CC0) according to the result.
10629 SIGNED_CODE should be the rtx operation for this comparison for
10630 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10632 We force a stack adjustment unless there are currently
10633 things pushed on the stack that aren't yet used. */
10636 compare (exp
, signed_code
, unsigned_code
)
10638 enum rtx_code signed_code
, unsigned_code
;
10641 = expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10643 = expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10644 register tree type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10645 register enum machine_mode mode
= TYPE_MODE (type
);
10646 int unsignedp
= TREE_UNSIGNED (type
);
10647 enum rtx_code code
= unsignedp
? unsigned_code
: signed_code
;
10649 #ifdef HAVE_canonicalize_funcptr_for_compare
10650 /* If function pointers need to be "canonicalized" before they can
10651 be reliably compared, then canonicalize them. */
10652 if (HAVE_canonicalize_funcptr_for_compare
10653 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10654 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10657 rtx new_op0
= gen_reg_rtx (mode
);
10659 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
10663 if (HAVE_canonicalize_funcptr_for_compare
10664 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10665 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10668 rtx new_op1
= gen_reg_rtx (mode
);
10670 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
10675 return compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
,
10677 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
10678 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
10681 /* Like compare but expects the values to compare as two rtx's.
10682 The decision as to signed or unsigned comparison must be made by the caller.
10684 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10687 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10688 size of MODE should be used. */
10691 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
10692 register rtx op0
, op1
;
10693 enum rtx_code code
;
10695 enum machine_mode mode
;
10701 /* If one operand is constant, make it the second one. Only do this
10702 if the other operand is not constant as well. */
10704 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
10705 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
10710 code
= swap_condition (code
);
10713 if (flag_force_mem
)
10715 op0
= force_not_mem (op0
);
10716 op1
= force_not_mem (op1
);
10719 do_pending_stack_adjust ();
10721 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
10722 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
10726 /* There's no need to do this now that combine.c can eliminate lots of
10727 sign extensions. This can be less efficient in certain cases on other
10730 /* If this is a signed equality comparison, we can do it as an
10731 unsigned comparison since zero-extension is cheaper than sign
10732 extension and comparisons with zero are done as unsigned. This is
10733 the case even on machines that can do fast sign extension, since
10734 zero-extension is easier to combine with other operations than
10735 sign-extension is. If we are comparing against a constant, we must
10736 convert it to what it would look like unsigned. */
10737 if ((code
== EQ
|| code
== NE
) && ! unsignedp
10738 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
10740 if (GET_CODE (op1
) == CONST_INT
10741 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
10742 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
10747 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
10749 return gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
10752 /* Generate code to calculate EXP using a store-flag instruction
10753 and return an rtx for the result. EXP is either a comparison
10754 or a TRUTH_NOT_EXPR whose operand is a comparison.
10756 If TARGET is nonzero, store the result there if convenient.
10758 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10761 Return zero if there is no suitable set-flag instruction
10762 available on this machine.
10764 Once expand_expr has been called on the arguments of the comparison,
10765 we are committed to doing the store flag, since it is not safe to
10766 re-evaluate the expression. We emit the store-flag insn by calling
10767 emit_store_flag, but only expand the arguments if we have a reason
10768 to believe that emit_store_flag will be successful. If we think that
10769 it will, but it isn't, we have to simulate the store-flag with a
10770 set/jump/set sequence. */
10773 do_store_flag (exp
, target
, mode
, only_cheap
)
10776 enum machine_mode mode
;
10779 enum rtx_code code
;
10780 tree arg0
, arg1
, type
;
10782 enum machine_mode operand_mode
;
10786 enum insn_code icode
;
10787 rtx subtarget
= target
;
10788 rtx result
, label
, pattern
, jump_pat
;
10790 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10791 result at the end. We can't simply invert the test since it would
10792 have already been inverted if it were valid. This case occurs for
10793 some floating-point comparisons. */
10795 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
10796 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
10798 arg0
= TREE_OPERAND (exp
, 0);
10799 arg1
= TREE_OPERAND (exp
, 1);
10800 type
= TREE_TYPE (arg0
);
10801 operand_mode
= TYPE_MODE (type
);
10802 unsignedp
= TREE_UNSIGNED (type
);
10804 /* We won't bother with BLKmode store-flag operations because it would mean
10805 passing a lot of information to emit_store_flag. */
10806 if (operand_mode
== BLKmode
)
10809 /* We won't bother with store-flag operations involving function pointers
10810 when function pointers must be canonicalized before comparisons. */
10811 #ifdef HAVE_canonicalize_funcptr_for_compare
10812 if (HAVE_canonicalize_funcptr_for_compare
10813 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10814 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10816 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10817 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10818 == FUNCTION_TYPE
))))
10825 /* Get the rtx comparison code to use. We know that EXP is a comparison
10826 operation of some type. Some comparisons against 1 and -1 can be
10827 converted to comparisons with zero. Do so here so that the tests
10828 below will be aware that we have a comparison with zero. These
10829 tests will not catch constants in the first operand, but constants
10830 are rarely passed as the first operand. */
10832 switch (TREE_CODE (exp
))
10841 if (integer_onep (arg1
))
10842 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
10844 code
= unsignedp
? LTU
: LT
;
10847 if (! unsignedp
&& integer_all_onesp (arg1
))
10848 arg1
= integer_zero_node
, code
= LT
;
10850 code
= unsignedp
? LEU
: LE
;
10853 if (! unsignedp
&& integer_all_onesp (arg1
))
10854 arg1
= integer_zero_node
, code
= GE
;
10856 code
= unsignedp
? GTU
: GT
;
10859 if (integer_onep (arg1
))
10860 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
10862 code
= unsignedp
? GEU
: GE
;
10868 /* Put a constant second. */
10869 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
10871 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
10872 code
= swap_condition (code
);
10875 /* If this is an equality or inequality test of a single bit, we can
10876 do this by shifting the bit being tested to the low-order bit and
10877 masking the result with the constant 1. If the condition was EQ,
10878 we xor it with 1. This does not require an scc insn and is faster
10879 than an scc insn even if we have it. */
10881 if ((code
== NE
|| code
== EQ
)
10882 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
10883 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10884 && TYPE_PRECISION (type
) <= HOST_BITS_PER_WIDE_INT
)
10886 tree inner
= TREE_OPERAND (arg0
, 0);
10891 tem
= INTVAL (expand_expr (TREE_OPERAND (arg0
, 1),
10892 NULL_RTX
, VOIDmode
, 0));
10893 /* In this case, immed_double_const will sign extend the value to make
10894 it look the same on the host and target. We must remove the
10895 sign-extension before calling exact_log2, since exact_log2 will
10896 fail for negative values. */
10897 if (BITS_PER_WORD
< HOST_BITS_PER_WIDE_INT
10898 && BITS_PER_WORD
== GET_MODE_BITSIZE (TYPE_MODE (type
)))
10899 /* We don't use the obvious constant shift to generate the mask,
10900 because that generates compiler warnings when BITS_PER_WORD is
10901 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10902 code is unreachable in that case. */
10903 tem
= tem
& GET_MODE_MASK (word_mode
);
10904 bitnum
= exact_log2 (tem
);
10906 /* If INNER is a right shift of a constant and it plus BITNUM does
10907 not overflow, adjust BITNUM and INNER. */
10909 if (TREE_CODE (inner
) == RSHIFT_EXPR
10910 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
10911 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
10912 && (bitnum
+ TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1))
10913 < TYPE_PRECISION (type
)))
10915 bitnum
+=TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
10916 inner
= TREE_OPERAND (inner
, 0);
10919 /* If we are going to be able to omit the AND below, we must do our
10920 operations as unsigned. If we must use the AND, we have a choice.
10921 Normally unsigned is faster, but for some machines signed is. */
10922 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
10923 #ifdef LOAD_EXTEND_OP
10924 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
10930 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
10931 || GET_MODE (subtarget
) != operand_mode
10932 || ! safe_from_p (subtarget
, inner
))
10935 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
10938 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
10939 size_int (bitnum
), subtarget
, ops_unsignedp
);
10941 if (GET_MODE (op0
) != mode
)
10942 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
10944 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
10945 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
10946 ops_unsignedp
, OPTAB_LIB_WIDEN
);
10948 /* Put the AND last so it can combine with more things. */
10949 if (bitnum
!= TYPE_PRECISION (type
) - 1)
10950 op0
= expand_and (op0
, const1_rtx
, subtarget
);
10955 /* Now see if we are likely to be able to do this. Return if not. */
10956 if (! can_compare_p (operand_mode
))
10958 icode
= setcc_gen_code
[(int) code
];
10959 if (icode
== CODE_FOR_nothing
10960 || (only_cheap
&& insn_operand_mode
[(int) icode
][0] != mode
))
10962 /* We can only do this if it is one of the special cases that
10963 can be handled without an scc insn. */
10964 if ((code
== LT
&& integer_zerop (arg1
))
10965 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
10967 else if (BRANCH_COST
>= 0
10968 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
10969 && TREE_CODE (type
) != REAL_TYPE
10970 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
10971 != CODE_FOR_nothing
)
10972 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
10973 != CODE_FOR_nothing
)))
10979 preexpand_calls (exp
);
10980 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
10981 || GET_MODE (subtarget
) != operand_mode
10982 || ! safe_from_p (subtarget
, arg1
))
10985 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
10986 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
10989 target
= gen_reg_rtx (mode
);
10991 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10992 because, if the emit_store_flag does anything it will succeed and
10993 OP0 and OP1 will not be used subsequently. */
10995 result
= emit_store_flag (target
, code
,
10996 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
10997 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
10998 operand_mode
, unsignedp
, 1);
11003 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
11004 result
, 0, OPTAB_LIB_WIDEN
);
11008 /* If this failed, we have to do this with set/compare/jump/set code. */
11009 if (target
== 0 || GET_CODE (target
) != REG
11010 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
11011 target
= gen_reg_rtx (GET_MODE (target
));
11013 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
11014 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
11015 operand_mode
, NULL_RTX
, 0);
11016 if (GET_CODE (result
) == CONST_INT
)
11017 return (((result
== const0_rtx
&& ! invert
)
11018 || (result
!= const0_rtx
&& invert
))
11019 ? const0_rtx
: const1_rtx
);
11021 label
= gen_label_rtx ();
11022 if (bcc_gen_fctn
[(int) code
] == 0)
11025 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
11026 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
11027 emit_label (label
);
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */

  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
                   gen_rtx (MULT, Pmode, index,
                            GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
11112 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11113 to that value is on the top of the stack. The resulting type is TYPE, and
11114 the source declaration is DECL. */
11117 bc_load_memory (type
, decl
)
11120 enum bytecode_opcode opcode
;
11123 /* Bit fields are special. We only know about signed and
11124 unsigned ints, and enums. The latter are treated as
11125 signed integers. */
11127 if (DECL_BIT_FIELD (decl
))
11128 if (TREE_CODE (type
) == ENUMERAL_TYPE
11129 || TREE_CODE (type
) == INTEGER_TYPE
)
11130 opcode
= TREE_UNSIGNED (type
) ? zxloadBI
: sxloadBI
;
11134 /* See corresponding comment in bc_store_memory(). */
11135 if (TYPE_MODE (type
) == BLKmode
11136 || TYPE_MODE (type
) == VOIDmode
)
11139 opcode
= mode_to_load_map
[(int) TYPE_MODE (type
)];
11141 if (opcode
== neverneverland
)
11144 bc_emit_bytecode (opcode
);
11146 #ifdef DEBUG_PRINT_CODE
11147 fputc ('\n', stderr
);
11152 /* Store the contents of the second stack slot to the address in the
11153 top stack slot. DECL is the declaration of the destination and is used
11154 to determine whether we're dealing with a bitfield. */
11157 bc_store_memory (type
, decl
)
11160 enum bytecode_opcode opcode
;
11163 if (DECL_BIT_FIELD (decl
))
11165 if (TREE_CODE (type
) == ENUMERAL_TYPE
11166 || TREE_CODE (type
) == INTEGER_TYPE
)
11172 if (TYPE_MODE (type
) == BLKmode
)
11174 /* Copy structure. This expands to a block copy instruction, storeBLK.
11175 In addition to the arguments expected by the other store instructions,
11176 it also expects a type size (SImode) on top of the stack, which is the
11177 structure size in size units (usually bytes). The two first arguments
11178 are already on the stack; so we just put the size on level 1. For some
11179 other languages, the size may be variable, this is why we don't encode
11180 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11182 bc_expand_expr (TYPE_SIZE (type
));
11186 opcode
= mode_to_store_map
[(int) TYPE_MODE (type
)];
11188 if (opcode
== neverneverland
)
11191 bc_emit_bytecode (opcode
);
11193 #ifdef DEBUG_PRINT_CODE
11194 fputc ('\n', stderr
);
11199 /* Allocate local stack space sufficient to hold a value of the given
11200 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11201 integral power of 2. A special case is locals of type VOID, which
11202 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11203 remapped into the corresponding attribute of SI. */
11206 bc_allocate_local (size
, alignment
)
11207 int size
, alignment
;
11210 int byte_alignment
;
11215 /* Normalize size and alignment */
11217 size
= UNITS_PER_WORD
;
11219 if (alignment
< BITS_PER_UNIT
)
11220 byte_alignment
= 1 << (INT_ALIGN
- 1);
11223 byte_alignment
= alignment
/ BITS_PER_UNIT
;
11225 if (local_vars_size
& (byte_alignment
- 1))
11226 local_vars_size
+= byte_alignment
- (local_vars_size
& (byte_alignment
- 1));
11228 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11229 local_vars_size
+= size
;
11235 /* Allocate variable-sized local array. Variable-sized arrays are
11236 actually pointers to the address in memory where they are stored. */
11239 bc_allocate_variable_array (size
)
11243 const int ptralign
= (1 << (PTR_ALIGN
- 1));
11245 /* Align pointer */
11246 if (local_vars_size
& ptralign
)
11247 local_vars_size
+= ptralign
- (local_vars_size
& ptralign
);
11249 /* Note down local space needed: pointer to block; also return
11252 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11253 local_vars_size
+= POINTER_SIZE
/ BITS_PER_UNIT
;
11258 /* Push the machine address for the given external variable offset. */
11261 bc_load_externaddr (externaddr
)
11264 bc_emit_bytecode (constP
);
11265 bc_emit_code_labelref (BYTECODE_LABEL (externaddr
),
11266 BYTECODE_BC_LABEL (externaddr
)->offset
);
11268 #ifdef DEBUG_PRINT_CODE
11269 fputc ('\n', stderr
);
11274 /* Like above, but expects an IDENTIFIER. */
11277 bc_load_externaddr_id (id
, offset
)
11281 if (!IDENTIFIER_POINTER (id
))
11284 bc_emit_bytecode (constP
);
11285 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id
)), offset
);
11287 #ifdef DEBUG_PRINT_CODE
11288 fputc ('\n', stderr
);
11293 /* Push the machine address for the given local variable offset. */
11296 bc_load_localaddr (localaddr
)
11299 bc_emit_instruction (localP
, (HOST_WIDE_INT
) BYTECODE_BC_LABEL (localaddr
)->offset
);
11303 /* Push the machine address for the given parameter offset.
11304 NOTE: offset is in bits. */
11307 bc_load_parmaddr (parmaddr
)
11310 bc_emit_instruction (argP
, ((HOST_WIDE_INT
) BYTECODE_BC_LABEL (parmaddr
)->offset
11315 /* Convert a[i] into *(a + i). */
11318 bc_canonicalize_array_ref (exp
)
11321 tree type
= TREE_TYPE (exp
);
11322 tree array_adr
= build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
),
11323 TREE_OPERAND (exp
, 0));
11324 tree index
= TREE_OPERAND (exp
, 1);
11327 /* Convert the integer argument to a type the same size as a pointer
11328 so the multiply won't overflow spuriously. */
11330 if (TYPE_PRECISION (TREE_TYPE (index
)) != POINTER_SIZE
)
11331 index
= convert (type_for_size (POINTER_SIZE
, 0), index
);
11333 /* The array address isn't volatile even if the array is.
11334 (Of course this isn't terribly relevant since the bytecode
11335 translator treats nearly everything as volatile anyway.) */
11336 TREE_THIS_VOLATILE (array_adr
) = 0;
11338 return build1 (INDIRECT_REF
, type
,
11339 fold (build (PLUS_EXPR
,
11340 TYPE_POINTER_TO (type
),
11342 fold (build (MULT_EXPR
,
11343 TYPE_POINTER_TO (type
),
11345 size_in_bytes (type
))))));
11349 /* Load the address of the component referenced by the given
11350 COMPONENT_REF expression.
11352 Returns innermost lvalue. */
11355 bc_expand_component_address (exp
)
11359 enum machine_mode mode
;
11361 HOST_WIDE_INT SIval
;
11364 tem
= TREE_OPERAND (exp
, 1);
11365 mode
= DECL_MODE (tem
);
11368 /* Compute cumulative bit offset for nested component refs
11369 and array refs, and find the ultimate containing object. */
11371 for (tem
= exp
;; tem
= TREE_OPERAND (tem
, 0))
11373 if (TREE_CODE (tem
) == COMPONENT_REF
)
11374 bitpos
+= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem
, 1)));
11376 if (TREE_CODE (tem
) == ARRAY_REF
11377 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
11378 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
))) == INTEGER_CST
)
11380 bitpos
+= (TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1))
11381 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem
)))
11382 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11387 bc_expand_expr (tem
);
11390 /* For bitfields also push their offset and size */
11391 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
11392 bc_push_offset_and_size (bitpos
, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp
, 1)));
11394 if (SIval
= bitpos
/ BITS_PER_UNIT
)
11395 bc_emit_instruction (addconstPSI
, SIval
);
11397 return (TREE_OPERAND (exp
, 1));
11401 /* Emit code to push two SI constants */
11404 bc_push_offset_and_size (offset
, size
)
11405 HOST_WIDE_INT offset
, size
;
11407 bc_emit_instruction (constSI
, offset
);
11408 bc_emit_instruction (constSI
, size
);
11412 /* Emit byte code to push the address of the given lvalue expression to
11413 the stack. If it's a bit field, we also push offset and size info.
11415 Returns innermost component, which allows us to determine not only
11416 its type, but also whether it's a bitfield. */
11419 bc_expand_address (exp
)
11423 if (!exp
|| TREE_CODE (exp
) == ERROR_MARK
)
11427 switch (TREE_CODE (exp
))
11431 return (bc_expand_address (bc_canonicalize_array_ref (exp
)));
11433 case COMPONENT_REF
:
11435 return (bc_expand_component_address (exp
));
11439 bc_expand_expr (TREE_OPERAND (exp
, 0));
11441 /* For variable-sized types: retrieve pointer. Sometimes the
11442 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11443 also make sure we have an operand, just in case... */
11445 if (TREE_OPERAND (exp
, 0)
11446 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
11447 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))) != INTEGER_CST
)
11448 bc_emit_instruction (loadP
);
11450 /* If packed, also return offset and size */
11451 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 0)))
11453 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 0))),
11454 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp
, 0))));
11456 return (TREE_OPERAND (exp
, 0));
11458 case FUNCTION_DECL
:
11460 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11461 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
11466 bc_load_parmaddr (DECL_RTL (exp
));
11468 /* For variable-sized types: retrieve pointer */
11469 if (TYPE_SIZE (TREE_TYPE (exp
))
11470 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11471 bc_emit_instruction (loadP
);
11473 /* If packed, also return offset and size */
11474 if (DECL_BIT_FIELD (exp
))
11475 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11476 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
11482 bc_emit_instruction (returnP
);
11488 if (BYTECODE_LABEL (DECL_RTL (exp
)))
11489 bc_load_externaddr (DECL_RTL (exp
));
11492 if (DECL_EXTERNAL (exp
))
11493 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11494 (BYTECODE_BC_LABEL (DECL_RTL (exp
)))->offset
);
11496 bc_load_localaddr (DECL_RTL (exp
));
11498 /* For variable-sized types: retrieve pointer */
11499 if (TYPE_SIZE (TREE_TYPE (exp
))
11500 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11501 bc_emit_instruction (loadP
);
11503 /* If packed, also return offset and size */
11504 if (DECL_BIT_FIELD (exp
))
11505 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11506 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
11514 bc_emit_bytecode (constP
);
11515 r
= output_constant_def (exp
);
11516 bc_emit_code_labelref (BYTECODE_LABEL (r
), BYTECODE_BC_LABEL (r
)->offset
);
11518 #ifdef DEBUG_PRINT_CODE
11519 fputc ('\n', stderr
);
11530 /* Most lvalues don't have components. */
11535 /* Emit a type code to be used by the runtime support in handling
11536 parameter passing. The type code consists of the machine mode
11537 plus the minimal alignment shifted left 8 bits. */
11540 bc_runtime_type_code (type
)
11545 switch (TREE_CODE (type
))
11551 case ENUMERAL_TYPE
:
11555 val
= (int) TYPE_MODE (type
) | TYPE_ALIGN (type
) << 8;
11567 return build_int_2 (val
, 0);
11571 /* Generate constructor label */
11574 bc_gen_constr_label ()
11576 static int label_counter
;
11577 static char label
[20];
11579 sprintf (label
, "*LR%d", label_counter
++);
11581 return (obstack_copy0 (&permanent_obstack
, label
, strlen (label
)));
11585 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11586 expand the constructor data as static data, and push a pointer to it.
11587 The pointer is put in the pointer table and is retrieved by a constP
11588 bytecode instruction. We then loop and store each constructor member in
11589 the corresponding component. Finally, we return the original pointer on
11593 bc_expand_constructor (constr
)
11597 HOST_WIDE_INT ptroffs
;
11601 /* Literal constructors are handled as constants, whereas
11602 non-literals are evaluated and stored element by element
11603 into the data segment. */
11605 /* Allocate space in proper segment and push pointer to space on stack.
11608 l
= bc_gen_constr_label ();
11610 if (TREE_CONSTANT (constr
))
11614 bc_emit_const_labeldef (l
);
11615 bc_output_constructor (constr
, int_size_in_bytes (TREE_TYPE (constr
)));
11621 bc_emit_data_labeldef (l
);
11622 bc_output_data_constructor (constr
);
11626 /* Add reference to pointer table and recall pointer to stack;
11627 this code is common for both types of constructors: literals
11628 and non-literals. */
11630 ptroffs
= bc_define_pointer (l
);
11631 bc_emit_instruction (constP
, ptroffs
);
11633 /* This is all that has to be done if it's a literal. */
11634 if (TREE_CONSTANT (constr
))
11638 /* At this point, we have the pointer to the structure on top of the stack.
11639 Generate sequences of store_memory calls for the constructor. */
11641 /* constructor type is structure */
11642 if (TREE_CODE (TREE_TYPE (constr
)) == RECORD_TYPE
)
11646 /* If the constructor has fewer fields than the structure,
11647 clear the whole structure first. */
11649 if (list_length (CONSTRUCTOR_ELTS (constr
))
11650 != list_length (TYPE_FIELDS (TREE_TYPE (constr
))))
11652 bc_emit_instruction (duplicate
);
11653 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11654 bc_emit_instruction (clearBLK
);
11657 /* Store each element of the constructor into the corresponding
11658 field of TARGET. */
11660 for (elt
= CONSTRUCTOR_ELTS (constr
); elt
; elt
= TREE_CHAIN (elt
))
11662 register tree field
= TREE_PURPOSE (elt
);
11663 register enum machine_mode mode
;
11668 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
)) /* * DECL_SIZE_UNIT (field) */;
11669 mode
= DECL_MODE (field
);
11670 unsignedp
= TREE_UNSIGNED (field
);
11672 bitpos
= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field
));
11674 bc_store_field (elt
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11675 /* The alignment of TARGET is
11676 at least what its type requires. */
11678 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11679 int_size_in_bytes (TREE_TYPE (constr
)));
11684 /* Constructor type is array */
11685 if (TREE_CODE (TREE_TYPE (constr
)) == ARRAY_TYPE
)
11689 tree domain
= TYPE_DOMAIN (TREE_TYPE (constr
));
11690 int minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
11691 int maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
11692 tree elttype
= TREE_TYPE (TREE_TYPE (constr
));
11694 /* If the constructor has fewer fields than the structure,
11695 clear the whole structure first. */
11697 if (list_length (CONSTRUCTOR_ELTS (constr
)) < maxelt
- minelt
+ 1)
11699 bc_emit_instruction (duplicate
);
11700 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11701 bc_emit_instruction (clearBLK
);
11705 /* Store each element of the constructor into the corresponding
11706 element of TARGET, determined by counting the elements. */
11708 for (elt
= CONSTRUCTOR_ELTS (constr
), i
= 0;
11710 elt
= TREE_CHAIN (elt
), i
++)
11712 register enum machine_mode mode
;
11717 mode
= TYPE_MODE (elttype
);
11718 bitsize
= GET_MODE_BITSIZE (mode
);
11719 unsignedp
= TREE_UNSIGNED (elttype
);
11721 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
))
11722 /* * TYPE_SIZE_UNIT (elttype) */ );
11724 bc_store_field (elt
, bitsize
, bitpos
, mode
,
11725 TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11726 /* The alignment of TARGET is
11727 at least what its type requires. */
11729 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11730 int_size_in_bytes (TREE_TYPE (constr
)));
11737 /* Store the value of EXP (an expression tree) into member FIELD of
11738 structure at address on stack, which has type TYPE, mode MODE and
11739 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11742 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11743 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11746 bc_store_field (field
, bitsize
, bitpos
, mode
, exp
, type
,
11747 value_mode
, unsignedp
, align
, total_size
)
11748 int bitsize
, bitpos
;
11749 enum machine_mode mode
;
11750 tree field
, exp
, type
;
11751 enum machine_mode value_mode
;
11757 /* Expand expression and copy pointer */
11758 bc_expand_expr (exp
);
11759 bc_emit_instruction (over
);
11762 /* If the component is a bit field, we cannot use addressing to access
11763 it. Use bit-field techniques to store in it. */
11765 if (DECL_BIT_FIELD (field
))
11767 bc_store_bit_field (bitpos
, bitsize
, unsignedp
);
11771 /* Not bit field */
11773 HOST_WIDE_INT offset
= bitpos
/ BITS_PER_UNIT
;
11775 /* Advance pointer to the desired member */
11777 bc_emit_instruction (addconstPSI
, offset
);
11780 bc_store_memory (type
, field
);
11785 /* Store SI/SU in bitfield */
11788 bc_store_bit_field (offset
, size
, unsignedp
)
11789 int offset
, size
, unsignedp
;
11791 /* Push bitfield offset and size */
11792 bc_push_offset_and_size (offset
, size
);
11795 bc_emit_instruction (sstoreBI
);
11799 /* Load SI/SU from bitfield */
11802 bc_load_bit_field (offset
, size
, unsignedp
)
11803 int offset
, size
, unsignedp
;
11805 /* Push bitfield offset and size */
11806 bc_push_offset_and_size (offset
, size
);
11808 /* Load: sign-extend if signed, else zero-extend */
11809 bc_emit_instruction (unsignedp
? zxloadBI
: sxloadBI
);
11813 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11814 (adjust stack pointer upwards), negative means add that number of
11815 levels (adjust the stack pointer downwards). Only positive values
11816 normally make sense. */
11819 bc_adjust_stack (nlevels
)
11828 bc_emit_instruction (drop
);
11831 bc_emit_instruction (drop
);
11836 bc_emit_instruction (adjstackSI
, (HOST_WIDE_INT
) nlevels
);
11837 stack_depth
-= nlevels
;
11840 #if defined (VALIDATE_STACK_FOR_BC)
11841 VALIDATE_STACK_FOR_BC ();