/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls
= 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust
;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop
;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call
;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
105 int target_temp_slot_level
;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
110 static rtx saveregs_value
;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value
;
115 /* This structure is used by move_by_pieces to describe the move to
118 struct move_by_pieces
128 int explicit_inc_from
;
135 /* This structure is used by clear_by_pieces to describe the clear to
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size
;
155 extern int stack_depth
;
156 extern int max_stack_depth
;
157 extern struct obstack permanent_obstack
;
158 extern rtx arg_pointer_save_area
;
160 static rtx enqueue_insn
PROTO((rtx
, rtx
));
161 static int queued_subexp_p
PROTO((rtx
));
162 static void init_queue
PROTO((void));
163 static void move_by_pieces
PROTO((rtx
, rtx
, int, int));
164 static int move_by_pieces_ninsns
PROTO((unsigned int, int));
165 static void move_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
166 struct move_by_pieces
*));
167 static void clear_by_pieces
PROTO((rtx
, int, int));
168 static void clear_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
169 struct clear_by_pieces
*));
170 static int is_zeros_p
PROTO((tree
));
171 static int mostly_zeros_p
PROTO((tree
));
172 static void store_constructor
PROTO((tree
, rtx
, int));
173 static rtx store_field
PROTO((rtx
, int, int, enum machine_mode
, tree
,
174 enum machine_mode
, int, int, int));
175 static int get_inner_unaligned_p
PROTO((tree
));
176 static tree save_noncopied_parts
PROTO((tree
, tree
));
177 static tree init_noncopied_parts
PROTO((tree
, tree
));
178 static int safe_from_p
PROTO((rtx
, tree
));
179 static int fixed_type_p
PROTO((tree
));
180 static rtx var_rtx
PROTO((tree
));
181 static int get_pointer_alignment
PROTO((tree
, unsigned));
182 static tree string_constant
PROTO((tree
, tree
*));
183 static tree c_strlen
PROTO((tree
));
184 static rtx expand_builtin
PROTO((tree
, rtx
, rtx
,
185 enum machine_mode
, int));
186 static int apply_args_size
PROTO((void));
187 static int apply_result_size
PROTO((void));
188 static rtx result_vector
PROTO((int, rtx
));
189 static rtx expand_builtin_apply_args
PROTO((void));
190 static rtx expand_builtin_apply
PROTO((rtx
, rtx
, rtx
));
191 static void expand_builtin_return
PROTO((rtx
));
192 static rtx expand_increment
PROTO((tree
, int, int));
193 void bc_expand_increment
PROTO((struct increment_operator
*, tree
));
194 rtx bc_allocate_local
PROTO((int, int));
195 void bc_store_memory
PROTO((tree
, tree
));
196 tree bc_expand_component_address
PROTO((tree
));
197 tree bc_expand_address
PROTO((tree
));
198 void bc_expand_constructor
PROTO((tree
));
199 void bc_adjust_stack
PROTO((int));
200 tree bc_canonicalize_array_ref
PROTO((tree
));
201 void bc_load_memory
PROTO((tree
, tree
));
202 void bc_load_externaddr
PROTO((rtx
));
203 void bc_load_externaddr_id
PROTO((tree
, int));
204 void bc_load_localaddr
PROTO((rtx
));
205 void bc_load_parmaddr
PROTO((rtx
));
206 static void preexpand_calls
PROTO((tree
));
207 static void do_jump_by_parts_greater
PROTO((tree
, int, rtx
, rtx
));
208 void do_jump_by_parts_greater_rtx
PROTO((enum machine_mode
, int, rtx
, rtx
, rtx
, rtx
));
209 static void do_jump_by_parts_equality
PROTO((tree
, rtx
, rtx
));
210 static void do_jump_by_parts_equality_rtx
PROTO((rtx
, rtx
, rtx
));
211 static void do_jump_for_compare
PROTO((rtx
, rtx
, rtx
));
212 static rtx compare
PROTO((tree
, enum rtx_code
, enum rtx_code
));
213 static rtx do_store_flag
PROTO((tree
, rtx
, enum machine_mode
, int));
214 static tree defer_cleanups_to
PROTO((tree
));
215 extern tree truthvalue_conversion
PROTO((tree
));
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
221 static char direct_load
[NUM_MACHINE_MODES
];
222 static char direct_store
[NUM_MACHINE_MODES
];
224 /* MOVE_RATIO is the number of move instructions that is better than
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
231 /* A value of around 6 would minimize code size; infinity would minimize
233 #define MOVE_RATIO 15
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map
[MAX_MACHINE_MODE
];
259 enum bytecode_opcode mode_to_load_map
[MAX_MACHINE_MODE
];
260 enum bytecode_opcode mode_to_store_map
[MAX_MACHINE_MODE
];
262 /* Initialize maps used to convert modes to const, load, and store
266 bc_init_mode_to_opcode_maps ()
270 for (mode
= 0; mode
< (int) MAX_MACHINE_MODE
; mode
++)
271 mode_to_const_map
[mode
] =
272 mode_to_load_map
[mode
] =
273 mode_to_store_map
[mode
] = neverneverland
;
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
280 #include "modemap.def"
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
291 enum machine_mode mode
;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem
= gen_rtx (MEM
, VOIDmode
, stack_pointer_rtx
);
296 rtx mem1
= gen_rtx (MEM
, VOIDmode
, frame_pointer_rtx
);
299 insn
= emit_insn (gen_rtx (SET
, 0, 0));
300 pat
= PATTERN (insn
);
302 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
303 mode
= (enum machine_mode
) ((int) mode
+ 1))
309 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
310 PUT_MODE (mem
, mode
);
311 PUT_MODE (mem1
, mode
);
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
316 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
317 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
318 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
321 if (! HARD_REGNO_MODE_OK (regno
, mode
))
324 reg
= gen_rtx (REG
, mode
, regno
);
327 SET_DEST (pat
) = reg
;
328 if (recog (pat
, insn
, &num_clobbers
) >= 0)
329 direct_load
[(int) mode
] = 1;
331 SET_SRC (pat
) = mem1
;
332 SET_DEST (pat
) = reg
;
333 if (recog (pat
, insn
, &num_clobbers
) >= 0)
334 direct_load
[(int) mode
] = 1;
337 SET_DEST (pat
) = mem
;
338 if (recog (pat
, insn
, &num_clobbers
) >= 0)
339 direct_store
[(int) mode
] = 1;
342 SET_DEST (pat
) = mem1
;
343 if (recog (pat
, insn
, &num_clobbers
) >= 0)
344 direct_store
[(int) mode
] = 1;
351 /* This is run at the start of compiling a function. */
358 pending_stack_adjust
= 0;
359 inhibit_defer_pop
= 0;
360 cleanups_this_call
= 0;
362 apply_args_value
= 0;
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
373 /* Instead of saving the postincrement queue, empty it. */
376 p
->pending_stack_adjust
= pending_stack_adjust
;
377 p
->inhibit_defer_pop
= inhibit_defer_pop
;
378 p
->cleanups_this_call
= cleanups_this_call
;
379 p
->saveregs_value
= saveregs_value
;
380 p
->apply_args_value
= apply_args_value
;
381 p
->forced_labels
= forced_labels
;
383 pending_stack_adjust
= 0;
384 inhibit_defer_pop
= 0;
385 cleanups_this_call
= 0;
387 apply_args_value
= 0;
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
395 restore_expr_status (p
)
398 pending_stack_adjust
= p
->pending_stack_adjust
;
399 inhibit_defer_pop
= p
->inhibit_defer_pop
;
400 cleanups_this_call
= p
->cleanups_this_call
;
401 saveregs_value
= p
->saveregs_value
;
402 apply_args_value
= p
->apply_args_value
;
403 forced_labels
= p
->forced_labels
;
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
409 static rtx pending_chain
;
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
419 enqueue_insn (var
, body
)
422 pending_chain
= gen_rtx (QUEUED
, GET_MODE (var
),
423 var
, NULL_RTX
, NULL_RTX
, body
, pending_chain
);
424 return pending_chain
;
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
443 protect_from_queue (x
, modify
)
447 register RTX_CODE code
= GET_CODE (x
);
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain
== 0)
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
462 if (code
== MEM
&& GET_MODE (x
) != BLKmode
463 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
465 register rtx y
= XEXP (x
, 0);
466 register rtx
new = gen_rtx (MEM
, GET_MODE (x
), QUEUED_VAR (y
));
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
474 register rtx temp
= gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp
, new),
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
485 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
486 if (tem
!= XEXP (x
, 0))
492 else if (code
== PLUS
|| code
== MULT
)
494 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
495 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
496 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x
) == 0)
507 return QUEUED_VAR (x
);
508 /* If the increment has happened and a pre-increment copy exists,
510 if (QUEUED_COPY (x
) != 0)
511 return QUEUED_COPY (x
);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
517 return QUEUED_COPY (x
);
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
529 register enum rtx_code code
= GET_CODE (x
);
535 return queued_subexp_p (XEXP (x
, 0));
539 return queued_subexp_p (XEXP (x
, 0))
540 || queued_subexp_p (XEXP (x
, 1));
545 /* Perform all the pending incrementations. */
551 while (p
= pending_chain
)
553 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
554 pending_chain
= QUEUED_NEXT (p
);
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
571 convert_move (to
, from
, unsignedp
)
572 register rtx to
, from
;
575 enum machine_mode to_mode
= GET_MODE (to
);
576 enum machine_mode from_mode
= GET_MODE (from
);
577 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
578 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
585 to
= protect_from_queue (to
, 1);
586 from
= protect_from_queue (from
, 0);
588 if (to_real
!= from_real
)
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
595 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
597 >= GET_MODE_SIZE (to_mode
))
598 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
599 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
601 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
604 if (to_mode
== from_mode
605 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
607 emit_move_insn (to
, from
);
615 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
617 /* Try converting directly if the insn is supported. */
618 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
621 emit_unop_insn (code
, to
, from
, UNKNOWN
);
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
629 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
636 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
643 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
650 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
657 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
665 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
672 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
679 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
686 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
693 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
701 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
708 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
715 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
722 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
730 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
737 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
744 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
751 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
758 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
770 libcall
= extendsfdf2_libfunc
;
774 libcall
= extendsfxf2_libfunc
;
778 libcall
= extendsftf2_libfunc
;
787 libcall
= truncdfsf2_libfunc
;
791 libcall
= extenddfxf2_libfunc
;
795 libcall
= extenddftf2_libfunc
;
804 libcall
= truncxfsf2_libfunc
;
808 libcall
= truncxfdf2_libfunc
;
817 libcall
= trunctfsf2_libfunc
;
821 libcall
= trunctfdf2_libfunc
;
827 if (libcall
== (rtx
) 0)
828 /* This conversion is not implemented yet. */
831 value
= emit_library_call_value (libcall
, NULL_RTX
, 1, to_mode
,
833 emit_move_insn (to
, value
);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
841 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
848 enum machine_mode lowpart_mode
;
849 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
851 /* Try converting directly if the insn is supported. */
852 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
860 from
= force_reg (from_mode
, from
);
861 emit_unop_insn (code
, to
, from
, equiv_code
);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
866 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
867 != CODE_FOR_nothing
))
869 if (GET_CODE (to
) == REG
)
870 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, to
));
871 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
872 emit_unop_insn (code
, to
,
873 gen_lowpart (word_mode
, to
), equiv_code
);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to
, from
))
884 from
= force_reg (from_mode
, from
);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
888 lowpart_mode
= word_mode
;
890 lowpart_mode
= from_mode
;
892 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
894 lowpart
= gen_lowpart (lowpart_mode
, to
);
895 emit_move_insn (lowpart
, lowfrom
);
897 /* Compute the value to put in each remaining word. */
899 fill_value
= const0_rtx
;
904 && insn_operand_mode
[(int) CODE_FOR_slt
][0] == word_mode
905 && STORE_FLAG_VALUE
== -1)
907 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
909 fill_value
= gen_reg_rtx (word_mode
);
910 emit_insn (gen_slt (fill_value
));
916 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
917 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
919 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
923 /* Fill the remaining words. */
924 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
926 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
927 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
932 if (fill_value
!= subword
)
933 emit_move_insn (subword
, fill_value
);
936 insns
= get_insns ();
939 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
940 gen_rtx (equiv_code
, to_mode
, copy_rtx (from
)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
948 if (!((GET_CODE (from
) == MEM
949 && ! MEM_VOLATILE_P (from
)
950 && direct_load
[(int) to_mode
]
951 && ! mode_dependent_address_p (XEXP (from
, 0)))
952 || GET_CODE (from
) == REG
953 || GET_CODE (from
) == SUBREG
))
954 from
= force_reg (from_mode
, from
);
955 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode
== PSImode
)
962 if (from_mode
!= SImode
)
963 from
= convert_to_mode (SImode
, from
, unsignedp
);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2
)
968 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode
== PSImode
)
977 if (to_mode
!= SImode
)
979 from
= convert_to_mode (SImode
, from
, unsignedp
);
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2
)
987 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
990 #endif /* HAVE_extendpsisi2 */
995 if (to_mode
== PDImode
)
997 if (from_mode
!= DImode
)
998 from
= convert_to_mode (DImode
, from
, unsignedp
);
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2
)
1003 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
1006 #endif /* HAVE_truncdipdi2 */
1010 if (from_mode
== PDImode
)
1012 if (to_mode
!= DImode
)
1014 from
= convert_to_mode (DImode
, from
, unsignedp
);
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2
)
1022 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1025 #endif /* HAVE_extendpdidi2 */
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1036 GET_MODE_BITSIZE (from_mode
)))
1038 if (!((GET_CODE (from
) == MEM
1039 && ! MEM_VOLATILE_P (from
)
1040 && direct_load
[(int) to_mode
]
1041 && ! mode_dependent_address_p (XEXP (from
, 0)))
1042 || GET_CODE (from
) == REG
1043 || GET_CODE (from
) == SUBREG
))
1044 from
= force_reg (from_mode
, from
);
1045 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1047 from
= copy_to_reg (from
);
1048 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1055 /* Convert directly if that works. */
1056 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1057 != CODE_FOR_nothing
)
1059 emit_unop_insn (code
, to
, from
, equiv_code
);
1064 enum machine_mode intermediate
;
1066 /* Search for a mode to convert via. */
1067 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1068 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1069 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1070 != CODE_FOR_nothing
)
1071 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1072 && TRULY_NOOP_TRUNCATION (to_mode
, intermediate
)))
1073 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1074 != CODE_FOR_nothing
))
1076 convert_move (to
, convert_to_mode (intermediate
, from
,
1077 unsignedp
), unsignedp
);
1081 /* No suitable intermediate mode. */
1086 /* Support special truncate insns for certain modes. */
1088 if (from_mode
== DImode
&& to_mode
== SImode
)
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2
)
1093 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1097 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1101 if (from_mode
== DImode
&& to_mode
== HImode
)
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2
)
1106 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1110 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1114 if (from_mode
== DImode
&& to_mode
== QImode
)
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2
)
1119 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1123 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1127 if (from_mode
== SImode
&& to_mode
== HImode
)
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2
)
1132 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1136 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1140 if (from_mode
== SImode
&& to_mode
== QImode
)
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2
)
1145 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1149 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1153 if (from_mode
== HImode
&& to_mode
== QImode
)
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2
)
1158 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1162 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1166 if (from_mode
== TImode
&& to_mode
== DImode
)
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2
)
1171 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1175 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1179 if (from_mode
== TImode
&& to_mode
== SImode
)
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2
)
1184 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1188 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1192 if (from_mode
== TImode
&& to_mode
== HImode
)
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2
)
1197 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1201 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1205 if (from_mode
== TImode
&& to_mode
== QImode
)
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2
)
1210 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1214 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1223 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1224 emit_move_insn (to
, temp
);
1228 /* Mode combination is not recognized. */
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1243 convert_to_mode (mode
, x
, unsignedp
)
1244 enum machine_mode mode
;
1248 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1265 convert_modes (mode
, oldmode
, x
, unsignedp
)
1266 enum machine_mode mode
, oldmode
;
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1275 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1278 x
= gen_lowpart (mode
, x
);
1280 if (GET_MODE (x
) != VOIDmode
)
1281 oldmode
= GET_MODE (x
);
1283 if (mode
== oldmode
)
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1292 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1293 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1296 HOST_WIDE_INT val
= INTVAL (x
);
1298 if (oldmode
!= VOIDmode
1299 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1301 int width
= GET_MODE_BITSIZE (oldmode
);
1303 /* We need to zero extend VAL. */
1304 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1307 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1315 if ((GET_CODE (x
) == CONST_INT
1316 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1317 || (GET_MODE_CLASS (mode
) == MODE_INT
1318 && GET_MODE_CLASS (oldmode
) == MODE_INT
1319 && (GET_CODE (x
) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1321 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1322 && direct_load
[(int) mode
])
1323 || (GET_CODE (x
) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1325 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1331 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1333 HOST_WIDE_INT val
= INTVAL (x
);
1334 int width
= GET_MODE_BITSIZE (oldmode
);
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1340 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1341 val
|= (HOST_WIDE_INT
) (-1) << width
;
1343 return GEN_INT (val
);
1346 return gen_lowpart (mode
, x
);
1349 temp
= gen_reg_rtx (mode
);
1350 convert_move (temp
, x
, unsignedp
);
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1361 move_by_pieces (to
, from
, len
, align
)
1365 struct move_by_pieces data
;
1366 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1367 int max_size
= MOVE_MAX
+ 1;
1370 data
.to_addr
= to_addr
;
1371 data
.from_addr
= from_addr
;
1375 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1376 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1378 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1379 || GET_CODE (from_addr
) == POST_INC
1380 || GET_CODE (from_addr
) == POST_DEC
);
1382 data
.explicit_inc_from
= 0;
1383 data
.explicit_inc_to
= 0;
1385 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1386 if (data
.reverse
) data
.offset
= len
;
1389 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1390 data
.from_struct
= MEM_IN_STRUCT_P (from
);
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data
.autinc_from
&& data
.autinc_to
)
1396 && move_by_pieces_ninsns (len
, align
) > 2)
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data
.reverse
&& ! data
.autinc_from
)
1401 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1402 data
.autinc_from
= 1;
1403 data
.explicit_inc_from
= -1;
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data
.autinc_from
)
1409 data
.from_addr
= copy_addr_to_reg (from_addr
);
1410 data
.autinc_from
= 1;
1411 data
.explicit_inc_from
= 1;
1414 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1415 data
.from_addr
= copy_addr_to_reg (from_addr
);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data
.reverse
&& ! data
.autinc_to
)
1419 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1421 data
.explicit_inc_to
= -1;
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data
.reverse
&& ! data
.autinc_to
)
1427 data
.to_addr
= copy_addr_to_reg (to_addr
);
1429 data
.explicit_inc_to
= 1;
1432 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1433 data
.to_addr
= copy_addr_to_reg (to_addr
);
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1443 while (max_size
> 1)
1445 enum machine_mode mode
= VOIDmode
, tmode
;
1446 enum insn_code icode
;
1448 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1449 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1450 if (GET_MODE_SIZE (tmode
) < max_size
)
1453 if (mode
== VOIDmode
)
1456 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1457 if (icode
!= CODE_FOR_nothing
1458 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1459 GET_MODE_SIZE (mode
)))
1460 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1462 max_size
= GET_MODE_SIZE (mode
);
1465 /* The code above should have handled everything. */
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
1474 move_by_pieces_ninsns (l
, align
)
1478 register int n_insns
= 0;
1479 int max_size
= MOVE_MAX
+ 1;
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1485 while (max_size
> 1)
1487 enum machine_mode mode
= VOIDmode
, tmode
;
1488 enum insn_code icode
;
1490 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1491 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1492 if (GET_MODE_SIZE (tmode
) < max_size
)
1495 if (mode
== VOIDmode
)
1498 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1499 if (icode
!= CODE_FOR_nothing
1500 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1501 GET_MODE_SIZE (mode
)))
1502 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1504 max_size
= GET_MODE_SIZE (mode
);
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1515 move_by_pieces_1 (genfun
, mode
, data
)
1517 enum machine_mode mode
;
1518 struct move_by_pieces
*data
;
1520 register int size
= GET_MODE_SIZE (mode
);
1521 register rtx to1
, from1
;
1523 while (data
->len
>= size
)
1525 if (data
->reverse
) data
->offset
-= size
;
1527 to1
= (data
->autinc_to
1528 ? gen_rtx (MEM
, mode
, data
->to_addr
)
1529 : change_address (data
->to
, mode
,
1530 plus_constant (data
->to_addr
, data
->offset
)));
1531 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
1534 ? gen_rtx (MEM
, mode
, data
->from_addr
)
1535 : change_address (data
->from
, mode
,
1536 plus_constant (data
->from_addr
, data
->offset
)));
1537 MEM_IN_STRUCT_P (from1
) = data
->from_struct
;
1539 #ifdef HAVE_PRE_DECREMENT
1540 if (data
->explicit_inc_to
< 0)
1541 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1542 if (data
->explicit_inc_from
< 0)
1543 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1546 emit_insn ((*genfun
) (to1
, from1
));
1547 #ifdef HAVE_POST_INCREMENT
1548 if (data
->explicit_inc_to
> 0)
1549 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1550 if (data
->explicit_inc_from
> 0)
1551 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1554 if (! data
->reverse
) data
->offset
+= size
;
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
1571 emit_block_move (x
, y
, size
, align
)
1576 if (GET_MODE (x
) != BLKmode
)
1579 if (GET_MODE (y
) != BLKmode
)
1582 x
= protect_from_queue (x
, 1);
1583 y
= protect_from_queue (y
, 0);
1584 size
= protect_from_queue (size
, 0);
1586 if (GET_CODE (x
) != MEM
)
1588 if (GET_CODE (y
) != MEM
)
1593 if (GET_CODE (size
) == CONST_INT
1594 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
1595 move_by_pieces (x
, y
, INTVAL (size
), align
);
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage. */
1602 rtx opalign
= GEN_INT (align
);
1603 enum machine_mode mode
;
1605 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1606 mode
= GET_MODE_WIDER_MODE (mode
))
1608 enum insn_code code
= movstr_optab
[(int) mode
];
1610 if (code
!= CODE_FOR_nothing
1611 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
1613 returned by the macro, it will definitely be less than the
1614 actual mode mask. */
1615 && ((GET_CODE (size
) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1617 <= GET_MODE_MASK (mode
)))
1618 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1619 && (insn_operand_predicate
[(int) code
][0] == 0
1620 || (*insn_operand_predicate
[(int) code
][0]) (x
, BLKmode
))
1621 && (insn_operand_predicate
[(int) code
][1] == 0
1622 || (*insn_operand_predicate
[(int) code
][1]) (y
, BLKmode
))
1623 && (insn_operand_predicate
[(int) code
][3] == 0
1624 || (*insn_operand_predicate
[(int) code
][3]) (opalign
,
1628 rtx last
= get_last_insn ();
1631 op2
= convert_to_mode (mode
, size
, 1);
1632 if (insn_operand_predicate
[(int) code
][2] != 0
1633 && ! (*insn_operand_predicate
[(int) code
][2]) (op2
, mode
))
1634 op2
= copy_to_mode_reg (mode
, op2
);
1636 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1643 delete_insns_since (last
);
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 emit_library_call (memcpy_libfunc
, 0,
1649 VOIDmode
, 3, XEXP (x
, 0), Pmode
,
1651 convert_to_mode (TYPE_MODE (sizetype
), size
,
1652 TREE_UNSIGNED (sizetype
)),
1653 TYPE_MODE (sizetype
));
1655 emit_library_call (bcopy_libfunc
, 0,
1656 VOIDmode
, 3, XEXP (y
, 0), Pmode
,
1658 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1659 TREE_UNSIGNED (integer_type_node
)),
1660 TYPE_MODE (integer_type_node
));
1665 /* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
1669 move_block_to_reg (regno
, x
, nregs
, mode
)
1673 enum machine_mode mode
;
1681 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1682 x
= validize_mem (force_const_mem (mode
, x
));
1684 /* See if the machine can do this with a load multiple insn. */
1685 #ifdef HAVE_load_multiple
1686 if (HAVE_load_multiple
)
1688 last
= get_last_insn ();
1689 pat
= gen_load_multiple (gen_rtx (REG
, word_mode
, regno
), x
,
1697 delete_insns_since (last
);
1701 for (i
= 0; i
< nregs
; i
++)
1702 emit_move_insn (gen_rtx (REG
, word_mode
, regno
+ i
),
1703 operand_subword_force (x
, i
, mode
));
1706 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
1712 move_block_from_reg (regno
, x
, nregs
, size
)
1721 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1722 to the left before storing to memory. */
1723 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1725 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1731 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1732 gen_rtx (REG
, word_mode
, regno
),
1733 build_int_2 ((UNITS_PER_WORD
- size
)
1734 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1735 emit_move_insn (tem
, shift
);
1739 /* See if the machine can do this with a store multiple insn. */
1740 #ifdef HAVE_store_multiple
1741 if (HAVE_store_multiple
)
1743 last
= get_last_insn ();
1744 pat
= gen_store_multiple (x
, gen_rtx (REG
, word_mode
, regno
),
1752 delete_insns_since (last
);
1756 for (i
= 0; i
< nregs
; i
++)
1758 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1763 emit_move_insn (tem
, gen_rtx (REG
, word_mode
, regno
+ i
));
1767 /* Emit code to move a block Y to a block X, where X is non-consecutive
1768 registers represented by a PARALLEL. */
1771 emit_group_load (x
, y
)
1774 rtx target_reg
, source
;
1777 if (GET_CODE (x
) != PARALLEL
)
1780 /* Check for a NULL entry, used to indicate that the parameter goes
1781 both on the stack and in registers. */
1782 if (XEXP (XVECEXP (x
, 0, 0), 0))
1787 for (; i
< XVECLEN (x
, 0); i
++)
1789 rtx element
= XVECEXP (x
, 0, i
);
1791 target_reg
= XEXP (element
, 0);
1793 if (GET_CODE (y
) == MEM
)
1794 source
= change_address (y
, GET_MODE (target_reg
),
1795 plus_constant (XEXP (y
, 0),
1796 INTVAL (XEXP (element
, 1))));
1797 else if (XEXP (element
, 1) == const0_rtx
)
1799 if (GET_MODE (target_reg
) == GET_MODE (y
))
1801 /* Allow for the target_reg to be smaller than the input register
1802 to allow for AIX with 4 DF arguments after a single SI arg. The
1803 last DF argument will only load 1 word into the integer registers,
1804 but load a DF value into the float registers. */
1805 else if (GET_MODE_SIZE (GET_MODE (target_reg
))
1806 <= GET_MODE_SIZE (GET_MODE (y
)))
1807 source
= gen_rtx (SUBREG
, GET_MODE (target_reg
), y
, 0);
1814 emit_move_insn (target_reg
, source
);
1818 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1819 registers represented by a PARALLEL. */
1822 emit_group_store (x
, y
)
1825 rtx source_reg
, target
;
1828 if (GET_CODE (y
) != PARALLEL
)
1831 /* Check for a NULL entry, used to indicate that the parameter goes
1832 both on the stack and in registers. */
1833 if (XEXP (XVECEXP (y
, 0, 0), 0))
1838 for (; i
< XVECLEN (y
, 0); i
++)
1840 rtx element
= XVECEXP (y
, 0, i
);
1842 source_reg
= XEXP (element
, 0);
1844 if (GET_CODE (x
) == MEM
)
1845 target
= change_address (x
, GET_MODE (source_reg
),
1846 plus_constant (XEXP (x
, 0),
1847 INTVAL (XEXP (element
, 1))));
1848 else if (XEXP (element
, 1) == const0_rtx
)
1853 emit_move_insn (target
, source_reg
);
1857 /* Add a USE expression for REG to the (possibly empty) list pointed
1858 to by CALL_FUSAGE. REG must denote a hard register. */
1861 use_reg (call_fusage
, reg
)
1862 rtx
*call_fusage
, reg
;
1864 if (GET_CODE (reg
) != REG
1865 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
1869 = gen_rtx (EXPR_LIST
, VOIDmode
,
1870 gen_rtx (USE
, VOIDmode
, reg
), *call_fusage
);
1873 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1874 starting at REGNO. All of these registers must be hard registers. */
1877 use_regs (call_fusage
, regno
, nregs
)
1884 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
1887 for (i
= 0; i
< nregs
; i
++)
1888 use_reg (call_fusage
, gen_rtx (REG
, reg_raw_mode
[regno
+ i
], regno
+ i
));
1891 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1892 PARALLEL REGS. This is for calls that pass values in multiple
1893 non-contiguous locations. The Irix 6 ABI has examples of this. */
1896 use_group_regs (call_fusage
, regs
)
1902 /* Check for a NULL entry, used to indicate that the parameter goes
1903 both on the stack and in registers. */
1904 if (XEXP (XVECEXP (regs
, 0, 0), 0))
1909 for (; i
< XVECLEN (regs
, 0); i
++)
1910 use_reg (call_fusage
, XEXP (XVECEXP (regs
, 0, i
), 0));
1913 /* Generate several move instructions to clear LEN bytes of block TO.
1914 (A MEM rtx with BLKmode). The caller must pass TO through
1915 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1919 clear_by_pieces (to
, len
, align
)
1923 struct clear_by_pieces data
;
1924 rtx to_addr
= XEXP (to
, 0);
1925 int max_size
= MOVE_MAX
+ 1;
1928 data
.to_addr
= to_addr
;
1931 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1932 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1934 data
.explicit_inc_to
= 0;
1936 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1937 if (data
.reverse
) data
.offset
= len
;
1940 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1942 /* If copying requires more than two move insns,
1943 copy addresses to registers (to make displacements shorter)
1944 and use post-increment if available. */
1946 && move_by_pieces_ninsns (len
, align
) > 2)
1948 #ifdef HAVE_PRE_DECREMENT
1949 if (data
.reverse
&& ! data
.autinc_to
)
1951 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1953 data
.explicit_inc_to
= -1;
1956 #ifdef HAVE_POST_INCREMENT
1957 if (! data
.reverse
&& ! data
.autinc_to
)
1959 data
.to_addr
= copy_addr_to_reg (to_addr
);
1961 data
.explicit_inc_to
= 1;
1964 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1965 data
.to_addr
= copy_addr_to_reg (to_addr
);
1968 if (! SLOW_UNALIGNED_ACCESS
1969 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1972 /* First move what we can in the largest integer mode, then go to
1973 successively smaller modes. */
1975 while (max_size
> 1)
1977 enum machine_mode mode
= VOIDmode
, tmode
;
1978 enum insn_code icode
;
1980 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1981 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1982 if (GET_MODE_SIZE (tmode
) < max_size
)
1985 if (mode
== VOIDmode
)
1988 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1989 if (icode
!= CODE_FOR_nothing
1990 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1991 GET_MODE_SIZE (mode
)))
1992 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1994 max_size
= GET_MODE_SIZE (mode
);
1997 /* The code above should have handled everything. */
2002 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2003 with move instructions for mode MODE. GENFUN is the gen_... function
2004 to make a move insn for that mode. DATA has all the other info. */
2007 clear_by_pieces_1 (genfun
, mode
, data
)
2009 enum machine_mode mode
;
2010 struct clear_by_pieces
*data
;
2012 register int size
= GET_MODE_SIZE (mode
);
2015 while (data
->len
>= size
)
2017 if (data
->reverse
) data
->offset
-= size
;
2019 to1
= (data
->autinc_to
2020 ? gen_rtx (MEM
, mode
, data
->to_addr
)
2021 : change_address (data
->to
, mode
,
2022 plus_constant (data
->to_addr
, data
->offset
)));
2023 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
2025 #ifdef HAVE_PRE_DECREMENT
2026 if (data
->explicit_inc_to
< 0)
2027 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2030 emit_insn ((*genfun
) (to1
, const0_rtx
));
2031 #ifdef HAVE_POST_INCREMENT
2032 if (data
->explicit_inc_to
> 0)
2033 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2036 if (! data
->reverse
) data
->offset
+= size
;
2042 /* Write zeros through the storage of OBJECT.
2043 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2044 the maximum alignment we can is has, measured in bytes. */
2047 clear_storage (object
, size
, align
)
2052 if (GET_MODE (object
) == BLKmode
)
2054 object
= protect_from_queue (object
, 1);
2055 size
= protect_from_queue (size
, 0);
2057 if (GET_CODE (size
) == CONST_INT
2058 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
2059 clear_by_pieces (object
, INTVAL (size
), align
);
2063 /* Try the most limited insn first, because there's no point
2064 including more than one in the machine description unless
2065 the more limited one has some advantage. */
2067 rtx opalign
= GEN_INT (align
);
2068 enum machine_mode mode
;
2070 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2071 mode
= GET_MODE_WIDER_MODE (mode
))
2073 enum insn_code code
= clrstr_optab
[(int) mode
];
2075 if (code
!= CODE_FOR_nothing
2076 /* We don't need MODE to be narrower than
2077 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2078 the mode mask, as it is returned by the macro, it will
2079 definitely be less than the actual mode mask. */
2080 && ((GET_CODE (size
) == CONST_INT
2081 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2082 <= GET_MODE_MASK (mode
)))
2083 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2084 && (insn_operand_predicate
[(int) code
][0] == 0
2085 || (*insn_operand_predicate
[(int) code
][0]) (object
,
2087 && (insn_operand_predicate
[(int) code
][2] == 0
2088 || (*insn_operand_predicate
[(int) code
][2]) (opalign
,
2092 rtx last
= get_last_insn ();
2095 op1
= convert_to_mode (mode
, size
, 1);
2096 if (insn_operand_predicate
[(int) code
][1] != 0
2097 && ! (*insn_operand_predicate
[(int) code
][1]) (op1
,
2099 op1
= copy_to_mode_reg (mode
, op1
);
2101 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2108 delete_insns_since (last
);
2113 #ifdef TARGET_MEM_FUNCTIONS
2114 emit_library_call (memset_libfunc
, 0,
2116 XEXP (object
, 0), Pmode
,
2117 const0_rtx
, TYPE_MODE (integer_type_node
),
2118 convert_to_mode (TYPE_MODE (sizetype
),
2119 size
, TREE_UNSIGNED (sizetype
)),
2120 TYPE_MODE (sizetype
));
2122 emit_library_call (bzero_libfunc
, 0,
2124 XEXP (object
, 0), Pmode
,
2125 convert_to_mode (TYPE_MODE (integer_type_node
),
2127 TREE_UNSIGNED (integer_type_node
)),
2128 TYPE_MODE (integer_type_node
));
2133 emit_move_insn (object
, const0_rtx
);
2136 /* Generate code to copy Y into X.
2137 Both Y and X must have the same mode, except that
2138 Y can be a constant with VOIDmode.
2139 This mode cannot be BLKmode; use emit_block_move for that.
2141 Return the last instruction emitted. */
2144 emit_move_insn (x
, y
)
2147 enum machine_mode mode
= GET_MODE (x
);
2149 x
= protect_from_queue (x
, 1);
2150 y
= protect_from_queue (y
, 0);
2152 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2155 if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2156 y
= force_const_mem (mode
, y
);
2158 /* If X or Y are memory references, verify that their addresses are valid
2160 if (GET_CODE (x
) == MEM
2161 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2162 && ! push_operand (x
, GET_MODE (x
)))
2164 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2165 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2167 if (GET_CODE (y
) == MEM
2168 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2170 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2171 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2173 if (mode
== BLKmode
)
2176 return emit_move_insn_1 (x
, y
);
2179 /* Low level part of emit_move_insn.
2180 Called just like emit_move_insn, but assumes X and Y
2181 are basically valid. */
2184 emit_move_insn_1 (x
, y
)
2187 enum machine_mode mode
= GET_MODE (x
);
2188 enum machine_mode submode
;
2189 enum mode_class
class = GET_MODE_CLASS (mode
);
2192 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2194 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2196 /* Expand complex moves by moving real part and imag part, if possible. */
2197 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2198 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2200 (class == MODE_COMPLEX_INT
2201 ? MODE_INT
: MODE_FLOAT
),
2203 && (mov_optab
->handlers
[(int) submode
].insn_code
2204 != CODE_FOR_nothing
))
2206 /* Don't split destination if it is a stack push. */
2207 int stack
= push_operand (x
, GET_MODE (x
));
2210 /* If this is a stack, push the highpart first, so it
2211 will be in the argument order.
2213 In that case, change_address is used only to convert
2214 the mode, not to change the address. */
2217 /* Note that the real part always precedes the imag part in memory
2218 regardless of machine's endianness. */
2219 #ifdef STACK_GROWS_DOWNWARD
2220 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2221 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2222 gen_imagpart (submode
, y
)));
2223 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2224 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2225 gen_realpart (submode
, y
)));
2227 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2228 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2229 gen_realpart (submode
, y
)));
2230 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2231 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2232 gen_imagpart (submode
, y
)));
2237 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2238 (gen_realpart (submode
, x
), gen_realpart (submode
, y
)));
2239 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2240 (gen_imagpart (submode
, x
), gen_imagpart (submode
, y
)));
2243 return get_last_insn ();
2246 /* This will handle any multi-word mode that lacks a move_insn pattern.
2247 However, you will get better code if you define such patterns,
2248 even if they must turn into multiple assembler instructions. */
2249 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2254 #ifdef PUSH_ROUNDING
2256 /* If X is a push on the stack, do the push now and replace
2257 X with a reference to the stack pointer. */
2258 if (push_operand (x
, GET_MODE (x
)))
2260 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2261 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2265 /* Show the output dies here. */
2267 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, x
));
2270 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2273 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2274 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2276 /* If we can't get a part of Y, put Y into memory if it is a
2277 constant. Otherwise, force it into a register. If we still
2278 can't get a part of Y, abort. */
2279 if (ypart
== 0 && CONSTANT_P (y
))
2281 y
= force_const_mem (mode
, y
);
2282 ypart
= operand_subword (y
, i
, 1, mode
);
2284 else if (ypart
== 0)
2285 ypart
= operand_subword_force (y
, i
, mode
);
2287 if (xpart
== 0 || ypart
== 0)
2290 last_insn
= emit_move_insn (xpart
, ypart
);
2299 /* Pushing data onto the stack. */
2301 /* Push a block of length SIZE (perhaps variable)
2302 and return an rtx to address the beginning of the block.
2303 Note that it is not possible for the value returned to be a QUEUED.
2304 The value may be virtual_outgoing_args_rtx.
2306 EXTRA is the number of bytes of padding to push in addition to SIZE.
2307 BELOW nonzero means this padding comes at low addresses;
2308 otherwise, the padding comes at high addresses. */
2311 push_block (size
, extra
, below
)
2317 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2318 if (CONSTANT_P (size
))
2319 anti_adjust_stack (plus_constant (size
, extra
));
2320 else if (GET_CODE (size
) == REG
&& extra
== 0)
2321 anti_adjust_stack (size
);
2324 rtx temp
= copy_to_mode_reg (Pmode
, size
);
2326 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2327 temp
, 0, OPTAB_LIB_WIDEN
);
2328 anti_adjust_stack (temp
);
2331 #ifdef STACK_GROWS_DOWNWARD
2332 temp
= virtual_outgoing_args_rtx
;
2333 if (extra
!= 0 && below
)
2334 temp
= plus_constant (temp
, extra
);
2336 if (GET_CODE (size
) == CONST_INT
)
2337 temp
= plus_constant (virtual_outgoing_args_rtx
,
2338 - INTVAL (size
) - (below
? 0 : extra
));
2339 else if (extra
!= 0 && !below
)
2340 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2341 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2343 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2344 negate_rtx (Pmode
, size
));
2347 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
2353 return gen_rtx (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
2356 /* Generate code to push X onto the stack, assuming it has mode MODE and
2358 MODE is redundant except when X is a CONST_INT (since they don't
2360 SIZE is an rtx for the size of data to be copied (in bytes),
2361 needed only if X is BLKmode.
2363 ALIGN (in bytes) is maximum alignment we can assume.
2365 If PARTIAL and REG are both nonzero, then copy that many of the first
2366 words of X into registers starting with REG, and push the rest of X.
2367 The amount of space pushed is decreased by PARTIAL words,
2368 rounded *down* to a multiple of PARM_BOUNDARY.
2369 REG must be a hard register in this case.
2370 If REG is zero but PARTIAL is not, take any all others actions for an
2371 argument partially in registers, but do not actually load any
2374 EXTRA is the amount in bytes of extra space to leave next to this arg.
2375 This is ignored if an argument block has already been allocated.
2377 On a machine that lacks real push insns, ARGS_ADDR is the address of
2378 the bottom of the argument block for this call. We use indexing off there
2379 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2380 argument block has not been preallocated.
2382 ARGS_SO_FAR is the size of args previously pushed for this call. */
2385 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2386 args_addr
, args_so_far
)
2388 enum machine_mode mode
;
2399 enum direction stack_direction
2400 #ifdef STACK_GROWS_DOWNWARD
2406 /* Decide where to pad the argument: `downward' for below,
2407 `upward' for above, or `none' for don't pad it.
2408 Default is below for small data on big-endian machines; else above. */
2409 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2411 /* If we're placing part of X into a register and part of X onto
2412 the stack, indicate that the entire register is clobbered to
2413 keep flow from thinking the unused part of the register is live. */
2414 if (partial
> 0 && reg
!= 0)
2415 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, reg
));
2417 /* Invert direction if stack is post-update. */
2418 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2419 if (where_pad
!= none
)
2420 where_pad
= (where_pad
== downward
? upward
: downward
);
2422 xinner
= x
= protect_from_queue (x
, 0);
2424 if (mode
== BLKmode
)
2426 /* Copy a block into the stack, entirely or partially. */
2429 int used
= partial
* UNITS_PER_WORD
;
2430 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2438 /* USED is now the # of bytes we need not copy to the stack
2439 because registers will take care of them. */
2442 xinner
= change_address (xinner
, BLKmode
,
2443 plus_constant (XEXP (xinner
, 0), used
));
2445 /* If the partial register-part of the arg counts in its stack size,
2446 skip the part of stack space corresponding to the registers.
2447 Otherwise, start copying to the beginning of the stack space,
2448 by setting SKIP to 0. */
2449 #ifndef REG_PARM_STACK_SPACE
2455 #ifdef PUSH_ROUNDING
2456 /* Do it with several push insns if that doesn't take lots of insns
2457 and if there is no difficulty with push insns that skip bytes
2458 on the stack for alignment purposes. */
2460 && GET_CODE (size
) == CONST_INT
2462 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
) - used
, align
)
2464 /* Here we avoid the case of a structure whose weak alignment
2465 forces many pushes of a small amount of data,
2466 and such small pushes do rounding that causes trouble. */
2467 && ((! SLOW_UNALIGNED_ACCESS
)
2468 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
2469 || PUSH_ROUNDING (align
) == align
)
2470 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
2472 /* Push padding now if padding above and stack grows down,
2473 or if padding below and stack grows up.
2474 But if space already allocated, this has already been done. */
2475 if (extra
&& args_addr
== 0
2476 && where_pad
!= none
&& where_pad
!= stack_direction
)
2477 anti_adjust_stack (GEN_INT (extra
));
2479 move_by_pieces (gen_rtx (MEM
, BLKmode
, gen_push_operand ()), xinner
,
2480 INTVAL (size
) - used
, align
);
2483 #endif /* PUSH_ROUNDING */
2485 /* Otherwise make space on the stack and copy the data
2486 to the address of that space. */
2488 /* Deduct words put into registers from the size we must copy. */
2491 if (GET_CODE (size
) == CONST_INT
)
2492 size
= GEN_INT (INTVAL (size
) - used
);
2494 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
2495 GEN_INT (used
), NULL_RTX
, 0,
2499 /* Get the address of the stack space.
2500 In this case, we do not deal with EXTRA separately.
2501 A single stack adjust will do. */
2504 temp
= push_block (size
, extra
, where_pad
== downward
);
2507 else if (GET_CODE (args_so_far
) == CONST_INT
)
2508 temp
= memory_address (BLKmode
,
2509 plus_constant (args_addr
,
2510 skip
+ INTVAL (args_so_far
)));
2512 temp
= memory_address (BLKmode
,
2513 plus_constant (gen_rtx (PLUS
, Pmode
,
2514 args_addr
, args_so_far
),
2517 /* TEMP is the address of the block. Copy the data there. */
2518 if (GET_CODE (size
) == CONST_INT
2519 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
), align
)
2522 move_by_pieces (gen_rtx (MEM
, BLKmode
, temp
), xinner
,
2523 INTVAL (size
), align
);
2526 /* Try the most limited insn first, because there's no point
2527 including more than one in the machine description unless
2528 the more limited one has some advantage. */
2529 #ifdef HAVE_movstrqi
2531 && GET_CODE (size
) == CONST_INT
2532 && ((unsigned) INTVAL (size
)
2533 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
2535 rtx pat
= gen_movstrqi (gen_rtx (MEM
, BLKmode
, temp
),
2536 xinner
, size
, GEN_INT (align
));
2544 #ifdef HAVE_movstrhi
2546 && GET_CODE (size
) == CONST_INT
2547 && ((unsigned) INTVAL (size
)
2548 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
2550 rtx pat
= gen_movstrhi (gen_rtx (MEM
, BLKmode
, temp
),
2551 xinner
, size
, GEN_INT (align
));
2559 #ifdef HAVE_movstrsi
2562 rtx pat
= gen_movstrsi (gen_rtx (MEM
, BLKmode
, temp
),
2563 xinner
, size
, GEN_INT (align
));
2571 #ifdef HAVE_movstrdi
2574 rtx pat
= gen_movstrdi (gen_rtx (MEM
, BLKmode
, temp
),
2575 xinner
, size
, GEN_INT (align
));
2584 #ifndef ACCUMULATE_OUTGOING_ARGS
2585 /* If the source is referenced relative to the stack pointer,
2586 copy it to another register to stabilize it. We do not need
2587 to do this if we know that we won't be changing sp. */
2589 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
2590 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
2591 temp
= copy_to_reg (temp
);
2594 /* Make inhibit_defer_pop nonzero around the library call
2595 to force it to pop the bcopy-arguments right away. */
2597 #ifdef TARGET_MEM_FUNCTIONS
2598 emit_library_call (memcpy_libfunc
, 0,
2599 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
2600 convert_to_mode (TYPE_MODE (sizetype
),
2601 size
, TREE_UNSIGNED (sizetype
)),
2602 TYPE_MODE (sizetype
));
2604 emit_library_call (bcopy_libfunc
, 0,
2605 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
2606 convert_to_mode (TYPE_MODE (integer_type_node
),
2608 TREE_UNSIGNED (integer_type_node
)),
2609 TYPE_MODE (integer_type_node
));
2614 else if (partial
> 0)
2616 /* Scalar partly in registers. */
2618 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
2621 /* # words of start of argument
2622 that we must make space for but need not store. */
2623 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
2624 int args_offset
= INTVAL (args_so_far
);
2627 /* Push padding now if padding above and stack grows down,
2628 or if padding below and stack grows up.
2629 But if space already allocated, this has already been done. */
2630 if (extra
&& args_addr
== 0
2631 && where_pad
!= none
&& where_pad
!= stack_direction
)
2632 anti_adjust_stack (GEN_INT (extra
));
2634 /* If we make space by pushing it, we might as well push
2635 the real data. Otherwise, we can leave OFFSET nonzero
2636 and leave the space uninitialized. */
2640 /* Now NOT_STACK gets the number of words that we don't need to
2641 allocate on the stack. */
2642 not_stack
= partial
- offset
;
2644 /* If the partial register-part of the arg counts in its stack size,
2645 skip the part of stack space corresponding to the registers.
2646 Otherwise, start copying to the beginning of the stack space,
2647 by setting SKIP to 0. */
2648 #ifndef REG_PARM_STACK_SPACE
2654 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2655 x
= validize_mem (force_const_mem (mode
, x
));
2657 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2658 SUBREGs of such registers are not allowed. */
2659 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
2660 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
2661 x
= copy_to_reg (x
);
2663 /* Loop over all the words allocated on the stack for this arg. */
2664 /* We can do it by words, because any scalar bigger than a word
2665 has a size a multiple of a word. */
2666 #ifndef PUSH_ARGS_REVERSED
2667 for (i
= not_stack
; i
< size
; i
++)
2669 for (i
= size
- 1; i
>= not_stack
; i
--)
2671 if (i
>= not_stack
+ offset
)
2672 emit_push_insn (operand_subword_force (x
, i
, mode
),
2673 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
2675 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
2676 * UNITS_PER_WORD
)));
2682 /* Push padding now if padding above and stack grows down,
2683 or if padding below and stack grows up.
2684 But if space already allocated, this has already been done. */
2685 if (extra
&& args_addr
== 0
2686 && where_pad
!= none
&& where_pad
!= stack_direction
)
2687 anti_adjust_stack (GEN_INT (extra
));
2689 #ifdef PUSH_ROUNDING
2691 addr
= gen_push_operand ();
2694 if (GET_CODE (args_so_far
) == CONST_INT
)
2696 = memory_address (mode
,
2697 plus_constant (args_addr
, INTVAL (args_so_far
)));
2699 addr
= memory_address (mode
, gen_rtx (PLUS
, Pmode
, args_addr
,
2702 emit_move_insn (gen_rtx (MEM
, mode
, addr
), x
);
2706 /* If part should go in registers, copy that part
2707 into the appropriate registers. Do this now, at the end,
2708 since mem-to-mem copies above may do function calls. */
2709 if (partial
> 0 && reg
!= 0)
2711 /* Handle calls that pass values in multiple non-contiguous locations.
2712 The Irix 6 ABI has examples of this. */
2713 if (GET_CODE (reg
) == PARALLEL
)
2714 emit_group_load (reg
, x
);
2716 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
2719 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
2720 anti_adjust_stack (GEN_INT (extra
));
2723 /* Expand an assignment that stores the value of FROM into TO.
2724 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2725 (This may contain a QUEUED rtx;
2726 if the value is constant, this rtx is a constant.)
2727 Otherwise, the returned value is NULL_RTX.
2729 SUGGEST_REG is no longer actually used.
2730 It used to mean, copy the value through a register
2731 and return that register, if that is possible.
2732 We now use WANT_VALUE to decide whether to do this. */
/* Store the value of FROM into the lvalue TO; return an rtx for the
   value of TO when WANT_VALUE is nonzero, NULL_RTX otherwise (see the
   header comment above).  SUGGEST_REG is unused.
   NOTE(review): this chunk is a partial extraction -- the embedded
   original line numbers jump (e.g. 2736-2739, 2749-2750 elided), so
   braces and some statements are missing from the visible text; the
   comments below describe only what the visible lines establish.  */
2735 expand_assignment (to
, from
, want_value
, suggest_reg
)
/* Cached rtx for the expanded lvalue, so it is computed only once.  */
2740 register rtx to_rtx
= 0;
2743 /* Don't crash if the lhs of the assignment was erroneous. */
2745 if (TREE_CODE (to
) == ERROR_MARK
)
/* Still expand FROM for its side effects, then bail out.  */
2747 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2748 return want_value
? result
: NULL_RTX
;
/* Bytecode path: emit stack-machine opcodes instead of RTL.  */
2751 if (output_bytecode
)
2753 tree dest_innermost
;
/* Push the value, duplicate it (the assignment expression's own
   value), compute the destination address, and store through it.  */
2755 bc_expand_expr (from
);
2756 bc_emit_instruction (duplicate
);
2758 dest_innermost
= bc_expand_address (to
);
2760 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2761 take care of it here. */
2763 bc_store_memory (TREE_TYPE (to
), dest_innermost
);
2767 /* Assignment of a structure component needs special treatment
2768 if the structure component's rtx is not simply a MEM.
2769 Assignment of an array element at a constant index, and assignment of
2770 an array element in an unaligned packed structure field, has the same
2773 if (TREE_CODE (to
) == COMPONENT_REF
2774 || TREE_CODE (to
) == BIT_FIELD_REF
2775 || (TREE_CODE (to
) == ARRAY_REF
2776 && ((TREE_CODE (TREE_OPERAND (to
, 1)) == INTEGER_CST
2777 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to
))) == INTEGER_CST
)
2778 || (SLOW_UNALIGNED_ACCESS
&& get_inner_unaligned_p (to
)))))
2780 enum machine_mode mode1
;
/* Decompose the reference into its containing object TEM plus the
   bit position/size, mode, signedness and volatility of the field.  */
2790 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
,
2791 &mode1
, &unsignedp
, &volatilep
);
2793 /* If we are going to use store_bit_field and extract_bit_field,
2794 make sure to_rtx will be safe for multiple use. */
2796 if (mode1
== VOIDmode
&& want_value
)
2797 tem
= stabilize_reference (tem
);
2799 alignment
= TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
;
2800 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
/* Non-constant offset: add it to the address at run time.  */
2803 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
2805 if (GET_CODE (to_rtx
) != MEM
)
2807 to_rtx
= change_address (to_rtx
, VOIDmode
,
2808 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
2809 force_reg (ptr_mode
, offset_rtx
)));
2810 /* If we have a variable offset, the known alignment
2811 is only that of the innermost structure containing the field.
2812 (Actually, we could sometimes do better by using the
2813 align of an element of the innermost array, but no need.) */
2814 if (TREE_CODE (to
) == COMPONENT_REF
2815 || TREE_CODE (to
) == BIT_FIELD_REF
)
2817 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to
, 0))) / BITS_PER_UNIT
;
/* Volatile destination: force a fresh MEM before marking it, since
   the original may be shared.  */
2821 if (GET_CODE (to_rtx
) == MEM
)
2823 /* When the offset is zero, to_rtx is the address of the
2824 structure we are storing into, and hence may be shared.
2825 We must make a new MEM before setting the volatile bit. */
2827 to_rtx
= change_address (to_rtx
, VOIDmode
, XEXP (to_rtx
, 0));
2828 MEM_VOLATILE_P (to_rtx
) = 1;
2830 #if 0 /* This was turned off because, when a field is volatile
2831 in an object which is not volatile, the object may be in a register,
2832 and then we would abort over here. */
/* Perform the actual (possibly bit-field) store.  */
2838 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
2840 /* Spurious cast makes HPUX compiler happy. */
2841 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
2844 /* Required alignment of containing datum. */
2846 int_size_in_bytes (TREE_TYPE (tem
)));
2847 preserve_temp_slots (result
);
2851 /* If the value is meaningful, convert RESULT to the proper mode.
2852 Otherwise, return nothing. */
2853 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
2854 TYPE_MODE (TREE_TYPE (from
)),
2856 TREE_UNSIGNED (TREE_TYPE (to
)))
2860 /* If the rhs is a function call and its value is not an aggregate,
2861 call the function before we start to compute the lhs.
2862 This is needed for correct code for cases such as
2863 val = setjmp (buf) on machines where reference to val
2864 requires loading up part of an address in a separate insn.
2866 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2867 a promoted variable where the zero- or sign- extension needs to be done.
2868 Handling this in the normal way is safe because no computation is done
2870 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
2871 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
2872 && ! (TREE_CODE (to
) == VAR_DECL
&& GET_CODE (DECL_RTL (to
)) == REG
))
/* Evaluate the call first, then the lvalue, then copy the result.  */
2877 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2879 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2881 /* Handle calls that return values in multiple non-contiguous locations.
2882 The Irix 6 ABI has examples of this. */
2883 if (GET_CODE (to_rtx
) == PARALLEL
)
2884 emit_group_load (to_rtx
, value
);
2885 else if (GET_MODE (to_rtx
) == BLKmode
)
2886 emit_block_move (to_rtx
, value
, expr_size (from
),
2887 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
2889 emit_move_insn (to_rtx
, value
);
2890 preserve_temp_slots (to_rtx
);
2893 return want_value
? to_rtx
: NULL_RTX
;
2896 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2897 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2900 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2902 /* Don't move directly into a return register. */
2903 if (TREE_CODE (to
) == RESULT_DECL
&& GET_CODE (to_rtx
) == REG
)
/* Compute into a temporary first, then move into the return reg.  */
2908 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
2909 emit_move_insn (to_rtx
, temp
);
2910 preserve_temp_slots (to_rtx
);
2913 return want_value
? to_rtx
: NULL_RTX
;
2916 /* In case we are returning the contents of an object which overlaps
2917 the place the value is being stored, use a safe function when copying
2918 a value through a pointer into a structure value return block. */
2919 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
2920 && current_function_returns_struct
2921 && !current_function_returns_pcc_struct
)
2926 size
= expr_size (from
);
2927 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
/* Copy via a library call: memcpy when TARGET_MEM_FUNCTIONS is
   defined, bcopy otherwise -- note the reversed argument order.  */
2929 #ifdef TARGET_MEM_FUNCTIONS
2930 emit_library_call (memcpy_libfunc
, 0,
2931 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
2932 XEXP (from_rtx
, 0), Pmode
,
2933 convert_to_mode (TYPE_MODE (sizetype
),
2934 size
, TREE_UNSIGNED (sizetype
)),
2935 TYPE_MODE (sizetype
));
2937 emit_library_call (bcopy_libfunc
, 0,
2938 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
2939 XEXP (to_rtx
, 0), Pmode
,
2940 convert_to_mode (TYPE_MODE (integer_type_node
),
2941 size
, TREE_UNSIGNED (integer_type_node
)),
2942 TYPE_MODE (integer_type_node
));
2945 preserve_temp_slots (to_rtx
);
2948 return want_value
? to_rtx
: NULL_RTX
;
2951 /* Compute FROM and store the value in the rtx we got. */
2954 result
= store_expr (from
, to_rtx
, want_value
);
2955 preserve_temp_slots (result
);
2958 return want_value
? result
: NULL_RTX
;
2961 /* Generate code for computing expression EXP,
2962 and storing the value into TARGET.
2963 TARGET may contain a QUEUED rtx.
2965 If WANT_VALUE is nonzero, return a copy of the value
2966 not in TARGET, so that we can be sure to use the proper
2967 value in a containing expression even if TARGET has something
2968 else stored in it. If possible, we copy the value through a pseudo
2969 and return that pseudo. Or, if the value is constant, we try to
2970 return the constant. In some cases, we return a pseudo
2971 copied *from* TARGET.
2973 If the mode is BLKmode then we may return TARGET itself.
2974 It turns out that in BLKmode it doesn't cause a problem,
2975 because C has no operators that could combine two different
2976 assignments into the same BLKmode object with different values
2977 with no sequence point. Will other languages need this to
2980 If WANT_VALUE is 0, we return NULL, to make sure
2981 to catch quickly any cases where the caller uses the value
2982 and fails to set WANT_VALUE. */
/* Generate code to compute EXP and store it into TARGET; the
   WANT_VALUE contract is described in the header comment above.
   NOTE(review): many interior source lines are elided in this
   extraction (the embedded original line numbers jump), so several
   braces and statements are not visible; comments below describe only
   what the visible lines establish.  */
2985 store_expr (exp
, target
, want_value
)
2987 register rtx target
;
/* Nonzero means the caller must not be handed TARGET itself back.  */
2991 int dont_return_target
= 0;
2993 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
2995 /* Perform first part of compound expression, then assign from second
2997 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
2999 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3001 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3003 /* For conditional expression, get safe form of the target. Then
3004 test the condition, doing the appropriate assignment on either
3005 side. This avoids the creation of unnecessary temporaries.
3006 For non-BLKmode, it is more efficient not to do this. */
3008 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3009 rtx flag
= NULL_RTX
;
3010 tree left_cleanups
= NULL_TREE
;
3011 tree right_cleanups
= NULL_TREE
;
3012 tree old_cleanups
= cleanups_this_call
;
3014 /* Used to save a pointer to the place to put the setting of
3015 the flag that indicates if this side of the conditional was
3016 taken. We backpatch the code, if we find out later that we
3017 have any conditional cleanups that need to be performed. */
3018 rtx dest_right_flag
= NULL_RTX
;
3019 rtx dest_left_flag
= NULL_RTX
;
/* Test the condition, then store into TARGET on whichever arm runs,
   recording each arm's last insn for possible flag backpatching.  */
3022 target
= protect_from_queue (target
, 1);
3024 do_pending_stack_adjust ();
3026 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3027 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3028 dest_left_flag
= get_last_insn ();
3029 /* Handle conditional cleanups, if any. */
3030 left_cleanups
= defer_cleanups_to (old_cleanups
);
3032 emit_jump_insn (gen_jump (lab2
));
3035 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3036 dest_right_flag
= get_last_insn ();
3037 /* Handle conditional cleanups, if any. */
3038 right_cleanups
= defer_cleanups_to (old_cleanups
);
3043 /* Add back in any conditional cleanups. */
3044 if (left_cleanups
|| right_cleanups
)
3050 /* Now that we know that a flag is needed, go back and add in the
3051 setting of the flag. */
3053 flag
= gen_reg_rtx (word_mode
);
3055 /* Do the left side flag. */
3056 last
= get_last_insn ();
3057 /* Flag left cleanups as needed. */
3058 emit_move_insn (flag
, const1_rtx
);
3059 /* ??? deprecated, use sequences instead. */
3060 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_left_flag
);
3062 /* Do the right side flag. */
3063 last
= get_last_insn ();
3064 /* Flag right cleanups as needed. */
3065 emit_move_insn (flag
, const0_rtx
);
3066 /* ??? deprecated, use sequences instead. */
3067 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_right_flag
);
3069 /* All cleanups must be on the function_obstack. */
3070 push_obstacks_nochange ();
3071 resume_temporary_allocation ();
3073 /* convert flag, which is an rtx, into a tree. */
3074 cond
= make_node (RTL_EXPR
);
3075 TREE_TYPE (cond
) = integer_type_node
;
3076 RTL_EXPR_RTL (cond
) = flag
;
3077 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
3078 cond
= save_expr (cond
);
3080 if (! left_cleanups
)
3081 left_cleanups
= integer_zero_node
;
3082 if (! right_cleanups
)
3083 right_cleanups
= integer_zero_node
;
/* Build "cond ? left_cleanups : right_cleanups" so the correct
   arm's cleanups run later.  */
3084 new_cleanups
= build (COND_EXPR
, void_type_node
,
3085 truthvalue_conversion (cond
),
3086 left_cleanups
, right_cleanups
);
3087 new_cleanups
= fold (new_cleanups
);
3091 /* Now add in the conditionalized cleanups. */
3093 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
3094 expand_eh_region_start ();
3096 return want_value
? target
: NULL_RTX
;
3098 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3099 && GET_MODE (target
) != BLKmode
)
3100 /* If target is in memory and caller wants value in a register instead,
3101 arrange that. Pass TARGET as target for expand_expr so that,
3102 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3103 We know expand_expr will not use the target in that case.
3104 Don't do this if TARGET is volatile because we are supposed
3105 to write it and then read it. */
3107 temp
= expand_expr (exp
, cse_not_expected
? NULL_RTX
: target
,
3108 GET_MODE (target
), 0);
3109 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3110 temp
= copy_to_reg (temp
);
3111 dont_return_target
= 1;
3113 else if (queued_subexp_p (target
))
3114 /* If target contains a postincrement, let's not risk
3115 using it as the place to generate the rhs. */
3117 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3119 /* Expand EXP into a new pseudo. */
3120 temp
= gen_reg_rtx (GET_MODE (target
));
3121 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3124 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3126 /* If target is volatile, ANSI requires accessing the value
3127 *from* the target, if it is accessed. So make that happen.
3128 In no case return the target itself. */
3129 if (! MEM_VOLATILE_P (target
) && want_value
)
3130 dont_return_target
= 1;
3132 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3133 /* If this is a scalar in a register that is stored in a wider mode
3134 than the declared mode, compute the result into its declared mode
3135 and then convert to the wider mode. Our value is the computed
3138 /* If we don't want a value, we can do the conversion inside EXP,
3139 which will often result in some optimizations. Do the conversion
3140 in two steps: first change the signedness, if needed, then
3141 the extend. But don't do this if the type of EXP is a subtype
3142 of something else since then the conversion might involve
3143 more than just converting modes. */
3144 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3145 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3147 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3148 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3151 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3155 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3156 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3160 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3162 /* If TEMP is a volatile MEM and we want a result value, make
3163 the access now so it gets done only once. Likewise if
3164 it contains TARGET. */
3165 if (GET_CODE (temp
) == MEM
&& want_value
3166 && (MEM_VOLATILE_P (temp
)
3167 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3168 temp
= copy_to_reg (temp
);
3170 /* If TEMP is a VOIDmode constant, use convert_modes to make
3171 sure that we properly convert it. */
3172 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3173 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3174 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3175 SUBREG_PROMOTED_UNSIGNED_P (target
));
3177 convert_move (SUBREG_REG (target
), temp
,
3178 SUBREG_PROMOTED_UNSIGNED_P (target
));
3179 return want_value
? temp
: NULL_RTX
;
/* Ordinary case: let expand_expr compute EXP, preferably straight
   into TARGET.  */
3183 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3184 /* Return TARGET if it's a specified hardware register.
3185 If TARGET is a volatile mem ref, either return TARGET
3186 or return a reg copied *from* TARGET; ANSI requires this.
3188 Otherwise, if TEMP is not TARGET, return TEMP
3189 if it is constant (for efficiency),
3190 or if we really want the correct value. */
3191 if (!(target
&& GET_CODE (target
) == REG
3192 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3193 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3195 && (CONSTANT_P (temp
) || want_value
))
3196 dont_return_target
= 1;
3199 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3200 the same as that of TARGET, adjust the constant. This is needed, for
3201 example, in case it is a CONST_DOUBLE and we want only a word-sized
3203 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3204 && TREE_CODE (exp
) != ERROR_MARK
3205 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3206 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3207 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3209 /* If value was not generated in the target, store it there.
3210 Convert the value to TARGET's type first if nec. */
3212 if (temp
!= target
&& TREE_CODE (exp
) != ERROR_MARK
)
3214 target
= protect_from_queue (target
, 1);
3215 if (GET_MODE (temp
) != GET_MODE (target
)
3216 && GET_MODE (temp
) != VOIDmode
)
3218 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3219 if (dont_return_target
)
3221 /* In this case, we will return TEMP,
3222 so make sure it has the proper mode.
3223 But don't forget to store the value into TARGET. */
3224 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3225 emit_move_insn (target
, temp
);
3228 convert_move (target
, temp
, unsignedp
);
3231 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3233 /* Handle copying a string constant into an array.
3234 The string constant may be shorter than the array.
3235 So copy just the string's actual length, and clear the rest. */
3239 /* Get the size of the data type of the string,
3240 which is actually the size of the target. */
3241 size
= expr_size (exp
);
3242 if (GET_CODE (size
) == CONST_INT
3243 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3244 emit_block_move (target
, temp
, size
,
3245 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3248 /* Compute the size of the data to copy from the string. */
3250 = size_binop (MIN_EXPR
,
3251 make_tree (sizetype
, size
),
3253 build_int_2 (TREE_STRING_LENGTH (exp
), 0)));
3254 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3258 /* Copy that much. */
3259 emit_block_move (target
, temp
, copy_size_rtx
,
3260 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3262 /* Figure out how much is left in TARGET that we have to clear.
3263 Do all calculations in ptr_mode. */
3265 addr
= XEXP (target
, 0);
3266 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
3268 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3270 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
3271 size
= plus_constant (size
, - TREE_STRING_LENGTH (exp
));
/* Non-constant copy size: compute ADDR and the remaining SIZE with
   explicit arithmetic insns.  */
3275 addr
= force_reg (ptr_mode
, addr
);
3276 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
3277 copy_size_rtx
, NULL_RTX
, 0,
3280 size
= expand_binop (ptr_mode
, sub_optab
, size
,
3281 copy_size_rtx
, NULL_RTX
, 0,
/* Branch past the clearing when the remaining SIZE is negative;
   NOTE(review): the target label's emission point is elided in this
   extraction -- confirm against the full source.  */
3284 emit_cmp_insn (size
, const0_rtx
, LT
, NULL_RTX
,
3285 GET_MODE (size
), 0, 0);
3286 label
= gen_label_rtx ();
3287 emit_jump_insn (gen_blt (label
));
3290 if (size
!= const0_rtx
)
/* Clear the remainder of TARGET: memset with zero, or bzero.  */
3292 #ifdef TARGET_MEM_FUNCTIONS
3293 emit_library_call (memset_libfunc
, 0, VOIDmode
, 3,
3295 const0_rtx
, TYPE_MODE (integer_type_node
),
3296 convert_to_mode (TYPE_MODE (sizetype
),
3298 TREE_UNSIGNED (sizetype
)),
3299 TYPE_MODE (sizetype
));
3301 emit_library_call (bzero_libfunc
, 0, VOIDmode
, 2,
3303 convert_to_mode (TYPE_MODE (integer_type_node
),
3305 TREE_UNSIGNED (integer_type_node
)),
3306 TYPE_MODE (integer_type_node
));
3314 /* Handle calls that return values in multiple non-contiguous locations.
3315 The Irix 6 ABI has examples of this. */
3316 else if (GET_CODE (target
) == PARALLEL
)
3317 emit_group_load (target
, temp
)
;
3318 else if (GET_MODE (temp
) == BLKmode
)
3319 emit_block_move (target
, temp
, expr_size (exp
),
3320 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3322 emit_move_insn (target
, temp
);
3325 /* If we don't want a value, return NULL_RTX. */
3329 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3330 ??? The latter test doesn't seem to make sense. */
3331 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
3334 /* Return TARGET itself if it is a hard register. */
3335 else if (want_value
&& GET_MODE (target
) != BLKmode
3336 && ! (GET_CODE (target
) == REG
3337 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3338 return copy_to_reg (target
);
3344 /* Return 1 if EXP just contains zeros. */
3352 switch (TREE_CODE (exp
))
3356 case NON_LVALUE_EXPR
:
3357 return is_zeros_p (TREE_OPERAND (exp
, 0));
3360 return TREE_INT_CST_LOW (exp
) == 0 && TREE_INT_CST_HIGH (exp
) == 0;
3364 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
3367 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp
), dconst0
);
3370 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3371 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
3372 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3373 if (! is_zeros_p (TREE_VALUE (elt
)))
3382 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* Return 1 if at least 3/4 of the elements of CONSTRUCTOR EXP are
   themselves mostly zero, or if a non-constructor EXP satisfies
   is_zeros_p.
   NOTE(review): interior lines are elided in this extraction -- in
   particular the statements incrementing ELTS/ZEROS (original lines
   3405-3408) are not visible.  */
3385 mostly_zeros_p (exp
)
3388 if (TREE_CODE (exp
) == CONSTRUCTOR
)
/* Tallies: total elements scanned and how many were mostly zero.  */
3390 int elts
= 0, zeros
= 0;
3391 tree elt
= CONSTRUCTOR_ELTS (exp
);
/* A SET_TYPE constructor is all zero exactly when it is empty.  */
3392 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3394 /* If there are no ranges of true bits, it is all zero. */
3395 return elt
== NULL_TREE
;
3397 for (; elt
; elt
= TREE_CHAIN (elt
))
3399 /* We do not handle the case where the index is a RANGE_EXPR,
3400 so the statistic will be somewhat inaccurate.
3401 We do make a more accurate count in store_constructor itself,
3402 so since this function is only used for nested array elements,
3403 this should be close enough. */
3404 if (mostly_zeros_p (TREE_VALUE (elt
)))
/* "Mostly zero" means at least three quarters of the elements.  */
3409 return 4 * zeros
>= 3 * elts
;
/* Non-constructor: fall back to the strict all-zeros test.  */
3412 return is_zeros_p (exp
);
3415 /* Helper function for store_constructor.
3416 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3417 TYPE is the type of the CONSTRUCTOR, not the element type.
3418 CLEARED is as for store_constructor.
3420 This provides a recursive shortcut back to store_constructor when it isn't
3421 necessary to go through store_field. This is so that we can pass through
3422 the cleared field to let store_constructor know that we may not have to
3423 clear a substructure if the outer structure has already been cleared. */
/* Store constructor element EXP (of constructor type TYPE) into TARGET
   at bit position BITPOS with width BITSIZE and mode MODE.  A
   byte-aligned nested CONSTRUCTOR aimed at memory (or at bitpos 0)
   recurses through store_constructor, propagating CLEARED so inner
   zero stores can be skipped; everything else goes via store_field.
   NOTE(review): some parameter declarations and braces (original
   lines 3428, 3431-3433, 3440-3441, 3446-3447) are elided in this
   extraction.  */
3426 store_constructor_field (target
, bitsize
, bitpos
,
3427 mode
, exp
, type
, cleared
)
3429 int bitsize
, bitpos
;
3430 enum machine_mode mode
;
3434 if (TREE_CODE (exp
) == CONSTRUCTOR
3435 && bitpos
% BITS_PER_UNIT
== 0
3436 /* If we have a non-zero bitpos for a register target, then we just
3437 let store_field do the bitfield handling. This is unlikely to
3438 generate unnecessary clear instructions anyways. */
3439 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
/* Rebase TARGET at the element's byte offset, then recurse.  */
3442 target
= change_address (target
, VOIDmode
,
3443 plus_constant (XEXP (target
, 0),
3444 bitpos
/ BITS_PER_UNIT
));
3445 store_constructor (exp
, target
, cleared
);
/* Fallback: an ordinary (possibly bit-field) store.  */
3448 store_field (target
, bitsize
, bitpos
, mode
, exp
,
3449 VOIDmode
, 0, TYPE_ALIGN (type
) / BITS_PER_UNIT
,
3450 int_size_in_bytes (type
));
3453 /* Store the value of constructor EXP into the rtx TARGET.
3454 TARGET is either a REG or a MEM.
3455 CLEARED is true if TARGET is known to have been zero'd. */
3458 store_constructor (exp
, target
, cleared
)
3463 tree type
= TREE_TYPE (exp
);
3465 /* We know our target cannot conflict, since safe_from_p has been called. */
3467 /* Don't try copying piece by piece into a hard register
3468 since that is vulnerable to being clobbered by EXP.
3469 Instead, construct in a pseudo register and then copy it all. */
3470 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3472 rtx temp
= gen_reg_rtx (GET_MODE (target
));
3473 store_constructor (exp
, temp
, 0);
3474 emit_move_insn (target
, temp
);
3479 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
3480 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3484 /* Inform later passes that the whole union value is dead. */
3485 if (TREE_CODE (type
) == UNION_TYPE
3486 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3487 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3489 /* If we are building a static constructor into a register,
3490 set the initial value as zero so we can fold the value into
3491 a constant. But if more than one register is involved,
3492 this probably loses. */
3493 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
3494 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
3497 emit_move_insn (target
, const0_rtx
);
3502 /* If the constructor has fewer fields than the structure
3503 or if we are initializing the structure to mostly zeros,
3504 clear the whole structure first. */
3505 else if ((list_length (CONSTRUCTOR_ELTS (exp
))
3506 != list_length (TYPE_FIELDS (type
)))
3507 || mostly_zeros_p (exp
))
3510 clear_storage (target
, expr_size (exp
),
3511 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3516 /* Inform later passes that the old value is dead. */
3517 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3519 /* Store each element of the constructor into
3520 the corresponding field of TARGET. */
3522 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3524 register tree field
= TREE_PURPOSE (elt
);
3525 register enum machine_mode mode
;
3529 tree pos
, constant
= 0, offset
= 0;
3530 rtx to_rtx
= target
;
3532 /* Just ignore missing fields.
3533 We cleared the whole structure, above,
3534 if any fields are missing. */
3538 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
3541 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
3542 unsignedp
= TREE_UNSIGNED (field
);
3543 mode
= DECL_MODE (field
);
3544 if (DECL_BIT_FIELD (field
))
3547 pos
= DECL_FIELD_BITPOS (field
);
3548 if (TREE_CODE (pos
) == INTEGER_CST
)
3550 else if (TREE_CODE (pos
) == PLUS_EXPR
3551 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
3552 constant
= TREE_OPERAND (pos
, 1), offset
= TREE_OPERAND (pos
, 0);
3557 bitpos
= TREE_INT_CST_LOW (constant
);
3563 if (contains_placeholder_p (offset
))
3564 offset
= build (WITH_RECORD_EXPR
, sizetype
,
3567 offset
= size_binop (FLOOR_DIV_EXPR
, offset
,
3568 size_int (BITS_PER_UNIT
));
3570 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3571 if (GET_CODE (to_rtx
) != MEM
)
3575 = change_address (to_rtx
, VOIDmode
,
3576 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
3577 force_reg (ptr_mode
, offset_rtx
)));
3579 if (TREE_READONLY (field
))
3581 if (GET_CODE (to_rtx
) == MEM
)
3582 to_rtx
= change_address (to_rtx
, GET_MODE (to_rtx
),
3584 RTX_UNCHANGING_P (to_rtx
) = 1;
3587 store_constructor_field (to_rtx
, bitsize
, bitpos
,
3588 mode
, TREE_VALUE (elt
), type
, cleared
);
3591 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3596 tree domain
= TYPE_DOMAIN (type
);
3597 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
3598 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
3599 tree elttype
= TREE_TYPE (type
);
3601 /* If the constructor has fewer elements than the array,
3602 clear the whole array first. Similarly if this is
3603 static constructor of a non-BLKmode object. */
3604 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
3608 HOST_WIDE_INT count
= 0, zero_count
= 0;
3610 /* This loop is a more accurate version of the loop in
3611 mostly_zeros_p (it handles RANGE_EXPR in an index).
3612 It is also needed to check for missing elements. */
3613 for (elt
= CONSTRUCTOR_ELTS (exp
);
3615 elt
= TREE_CHAIN (elt
))
3617 tree index
= TREE_PURPOSE (elt
);
3618 HOST_WIDE_INT this_node_count
;
3619 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3621 tree lo_index
= TREE_OPERAND (index
, 0);
3622 tree hi_index
= TREE_OPERAND (index
, 1);
3623 if (TREE_CODE (lo_index
) != INTEGER_CST
3624 || TREE_CODE (hi_index
) != INTEGER_CST
)
3629 this_node_count
= TREE_INT_CST_LOW (hi_index
)
3630 - TREE_INT_CST_LOW (lo_index
) + 1;
3633 this_node_count
= 1;
3634 count
+= this_node_count
;
3635 if (mostly_zeros_p (TREE_VALUE (elt
)))
3636 zero_count
+= this_node_count
;
3638 /* Clear the entire array first if there are any missing elements,
3639 or if the incidence of zero elements is >= 75%. */
3640 if (count
< maxelt
- minelt
+ 1
3641 || 4 * zero_count
>= 3 * count
)
3647 clear_storage (target
, expr_size (exp
),
3648 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3652 /* Inform later passes that the old value is dead. */
3653 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3655 /* Store each element of the constructor into
3656 the corresponding element of TARGET, determined
3657 by counting the elements. */
3658 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
3660 elt
= TREE_CHAIN (elt
), i
++)
3662 register enum machine_mode mode
;
3666 tree value
= TREE_VALUE (elt
);
3667 tree index
= TREE_PURPOSE (elt
);
3668 rtx xtarget
= target
;
3670 if (cleared
&& is_zeros_p (value
))
3673 mode
= TYPE_MODE (elttype
);
3674 bitsize
= GET_MODE_BITSIZE (mode
);
3675 unsignedp
= TREE_UNSIGNED (elttype
);
3677 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3679 tree lo_index
= TREE_OPERAND (index
, 0);
3680 tree hi_index
= TREE_OPERAND (index
, 1);
3681 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
3682 struct nesting
*loop
;
3683 HOST_WIDE_INT lo
, hi
, count
;
3686 /* If the range is constant and "small", unroll the loop. */
3687 if (TREE_CODE (lo_index
) == INTEGER_CST
3688 && TREE_CODE (hi_index
) == INTEGER_CST
3689 && (lo
= TREE_INT_CST_LOW (lo_index
),
3690 hi
= TREE_INT_CST_LOW (hi_index
),
3691 count
= hi
- lo
+ 1,
3692 (GET_CODE (target
) != MEM
3694 || (TREE_CODE (TYPE_SIZE (elttype
)) == INTEGER_CST
3695 && TREE_INT_CST_LOW (TYPE_SIZE (elttype
)) * count
3698 lo
-= minelt
; hi
-= minelt
;
3699 for (; lo
<= hi
; lo
++)
3701 bitpos
= lo
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
));
3702 store_constructor_field (target
, bitsize
, bitpos
,
3703 mode
, value
, type
, cleared
);
3708 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
3709 loop_top
= gen_label_rtx ();
3710 loop_end
= gen_label_rtx ();
3712 unsignedp
= TREE_UNSIGNED (domain
);
3714 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
3716 DECL_RTL (index
) = index_r
3717 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
3720 if (TREE_CODE (value
) == SAVE_EXPR
3721 && SAVE_EXPR_RTL (value
) == 0)
3723 /* Make sure value gets expanded once before the
3725 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
3728 store_expr (lo_index
, index_r
, 0);
3729 loop
= expand_start_loop (0);
3731 /* Assign value to element index. */
3732 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3733 size_int (BITS_PER_UNIT
));
3734 position
= size_binop (MULT_EXPR
,
3735 size_binop (MINUS_EXPR
, index
,
3736 TYPE_MIN_VALUE (domain
)),
3738 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3739 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3740 xtarget
= change_address (target
, mode
, addr
);
3741 if (TREE_CODE (value
) == CONSTRUCTOR
)
3742 store_constructor (value
, xtarget
, cleared
);
3744 store_expr (value
, xtarget
, 0);
3746 expand_exit_loop_if_false (loop
,
3747 build (LT_EXPR
, integer_type_node
,
3750 expand_increment (build (PREINCREMENT_EXPR
,
3752 index
, integer_one_node
), 0, 0);
3754 emit_label (loop_end
);
3756 /* Needed by stupid register allocation, to extend the
3757 lifetime of pseudo-regs used by target past the end
3759 emit_insn (gen_rtx (USE
, GET_MODE (target
), target
));
3762 else if ((index
!= 0 && TREE_CODE (index
) != INTEGER_CST
)
3763 || TREE_CODE (TYPE_SIZE (elttype
)) != INTEGER_CST
)
3769 index
= size_int (i
);
3772 index
= size_binop (MINUS_EXPR
, index
,
3773 TYPE_MIN_VALUE (domain
));
3774 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3775 size_int (BITS_PER_UNIT
));
3776 position
= size_binop (MULT_EXPR
, index
, position
);
3777 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3778 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3779 xtarget
= change_address (target
, mode
, addr
);
3780 store_expr (value
, xtarget
, 0);
3785 bitpos
= ((TREE_INT_CST_LOW (index
) - minelt
)
3786 * TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3788 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3789 store_constructor_field (target
, bitsize
, bitpos
,
3790 mode
, value
, type
, cleared
);
3794 /* set constructor assignments */
3795 else if (TREE_CODE (type
) == SET_TYPE
)
3797 tree elt
= CONSTRUCTOR_ELTS (exp
);
3798 rtx xtarget
= XEXP (target
, 0);
3799 int set_word_size
= TYPE_ALIGN (type
);
3800 int nbytes
= int_size_in_bytes (type
), nbits
;
3801 tree domain
= TYPE_DOMAIN (type
);
3802 tree domain_min
, domain_max
, bitlength
;
3804 /* The default implementation strategy is to extract the constant
3805 parts of the constructor, use that to initialize the target,
3806 and then "or" in whatever non-constant ranges we need in addition.
3808 If a large set is all zero or all ones, it is
3809 probably better to set it using memset (if available) or bzero.
3810 Also, if a large set has just a single range, it may also be
3811 better to first clear the whole set (using
3812 bzero/memset), and set the bits we want. */
3814 /* Check for all zeros. */
3815 if (elt
== NULL_TREE
)
3818 clear_storage (target
, expr_size (exp
),
3819 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3823 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
3824 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
3825 bitlength
= size_binop (PLUS_EXPR
,
3826 size_binop (MINUS_EXPR
, domain_max
, domain_min
),
3829 if (nbytes
< 0 || TREE_CODE (bitlength
) != INTEGER_CST
)
3831 nbits
= TREE_INT_CST_LOW (bitlength
);
3833 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3834 are "complicated" (more than one range), initialize (the
3835 constant parts) by copying from a constant. */
3836 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
3837 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
3839 int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
3840 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
3841 char *bit_buffer
= (char *) alloca (nbits
);
3842 HOST_WIDE_INT word
= 0;
3845 int offset
= 0; /* In bytes from beginning of set. */
3846 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
3849 if (bit_buffer
[ibit
])
3851 if (BYTES_BIG_ENDIAN
)
3852 word
|= (1 << (set_word_size
- 1 - bit_pos
));
3854 word
|= 1 << bit_pos
;
3857 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
3859 if (word
!= 0 || ! cleared
)
3861 rtx datum
= GEN_INT (word
);
3863 /* The assumption here is that it is safe to use
3864 XEXP if the set is multi-word, but not if
3865 it's single-word. */
3866 if (GET_CODE (target
) == MEM
)
3868 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
3869 to_rtx
= change_address (target
, mode
, to_rtx
);
3871 else if (offset
== 0)
3875 emit_move_insn (to_rtx
, datum
);
3881 offset
+= set_word_size
/ BITS_PER_UNIT
;
3887 /* Don't bother clearing storage if the set is all ones. */
3888 if (TREE_CHAIN (elt
) != NULL_TREE
3889 || (TREE_PURPOSE (elt
) == NULL_TREE
3891 : (TREE_CODE (TREE_VALUE (elt
)) != INTEGER_CST
3892 || TREE_CODE (TREE_PURPOSE (elt
)) != INTEGER_CST
3893 || (TREE_INT_CST_LOW (TREE_VALUE (elt
))
3894 - TREE_INT_CST_LOW (TREE_PURPOSE (elt
)) + 1
3896 clear_storage (target
, expr_size (exp
),
3897 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3900 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
3902 /* start of range of element or NULL */
3903 tree startbit
= TREE_PURPOSE (elt
);
3904 /* end of range of element, or element value */
3905 tree endbit
= TREE_VALUE (elt
);
3906 HOST_WIDE_INT startb
, endb
;
3907 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
3909 bitlength_rtx
= expand_expr (bitlength
,
3910 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
3912 /* handle non-range tuple element like [ expr ] */
3913 if (startbit
== NULL_TREE
)
3915 startbit
= save_expr (endbit
);
3918 startbit
= convert (sizetype
, startbit
);
3919 endbit
= convert (sizetype
, endbit
);
3920 if (! integer_zerop (domain_min
))
3922 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
3923 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
3925 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
3926 EXPAND_CONST_ADDRESS
);
3927 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
3928 EXPAND_CONST_ADDRESS
);
3932 targetx
= assign_stack_temp (GET_MODE (target
),
3933 GET_MODE_SIZE (GET_MODE (target
)),
3935 emit_move_insn (targetx
, target
);
3937 else if (GET_CODE (target
) == MEM
)
3942 #ifdef TARGET_MEM_FUNCTIONS
3943 /* Optimization: If startbit and endbit are
3944 constants divisible by BITS_PER_UNIT,
3945 call memset instead. */
3946 if (TREE_CODE (startbit
) == INTEGER_CST
3947 && TREE_CODE (endbit
) == INTEGER_CST
3948 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
3949 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
3951 emit_library_call (memset_libfunc
, 0,
3953 plus_constant (XEXP (targetx
, 0),
3954 startb
/ BITS_PER_UNIT
),
3956 constm1_rtx
, TYPE_MODE (integer_type_node
),
3957 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
3958 TYPE_MODE (sizetype
));
3963 emit_library_call (gen_rtx (SYMBOL_REF
, Pmode
, "__setbits"),
3964 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
3965 bitlength_rtx
, TYPE_MODE (sizetype
),
3966 startbit_rtx
, TYPE_MODE (sizetype
),
3967 endbit_rtx
, TYPE_MODE (sizetype
));
3970 emit_move_insn (target
, targetx
);
3978 /* Store the value of EXP (an expression tree)
3979 into a subfield of TARGET which has mode MODE and occupies
3980 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3981 If MODE is VOIDmode, it means that we are storing into a bit-field.
3983 If VALUE_MODE is VOIDmode, return nothing in particular.
3984 UNSIGNEDP is not used in this case.
3986 Otherwise, return an rtx for the value stored. This rtx
3987 has mode VALUE_MODE if that is convenient to do.
3988 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3990 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3991 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3994 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
3995 unsignedp
, align
, total_size
)
3997 int bitsize
, bitpos
;
3998 enum machine_mode mode
;
4000 enum machine_mode value_mode
;
4005 HOST_WIDE_INT width_mask
= 0;
4007 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4008 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4010 /* If we are storing into an unaligned field of an aligned union that is
4011 in a register, we may have the mode of TARGET being an integer mode but
4012 MODE == BLKmode. In that case, get an aligned object whose size and
4013 alignment are the same as TARGET and store TARGET into it (we can avoid
4014 the store if the field being stored is the entire width of TARGET). Then
4015 call ourselves recursively to store the field into a BLKmode version of
4016 that object. Finally, load from the object into TARGET. This is not
4017 very efficient in general, but should only be slightly more expensive
4018 than the otherwise-required unaligned accesses. Perhaps this can be
4019 cleaned up later. */
4022 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4024 rtx object
= assign_stack_temp (GET_MODE (target
),
4025 GET_MODE_SIZE (GET_MODE (target
)), 0);
4026 rtx blk_object
= copy_rtx (object
);
4028 MEM_IN_STRUCT_P (object
) = 1;
4029 MEM_IN_STRUCT_P (blk_object
) = 1;
4030 PUT_MODE (blk_object
, BLKmode
);
4032 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4033 emit_move_insn (object
, target
);
4035 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4038 /* Even though we aren't returning target, we need to
4039 give it the updated value. */
4040 emit_move_insn (target
, object
);
4045 /* If the structure is in a register or if the component
4046 is a bit field, we cannot use addressing to access it.
4047 Use bit-field techniques or SUBREG to store in it. */
4049 if (mode
== VOIDmode
4050 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
4051 || GET_CODE (target
) == REG
4052 || GET_CODE (target
) == SUBREG
4053 /* If the field isn't aligned enough to store as an ordinary memref,
4054 store it as a bit field. */
4055 || (SLOW_UNALIGNED_ACCESS
4056 && align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
))
4057 || (SLOW_UNALIGNED_ACCESS
&& bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
4059 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4061 /* If BITSIZE is narrower than the size of the type of EXP
4062 we will be narrowing TEMP. Normally, what's wanted are the
4063 low-order bits. However, if EXP's type is a record and this is
4064 big-endian machine, we want the upper BITSIZE bits. */
4065 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4066 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4067 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4068 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4069 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4073 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4075 if (mode
!= VOIDmode
&& mode
!= BLKmode
4076 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4077 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4079 /* If the modes of TARGET and TEMP are both BLKmode, both
4080 must be in memory and BITPOS must be aligned on a byte
4081 boundary. If so, we simply do a block copy. */
4082 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4084 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4085 || bitpos
% BITS_PER_UNIT
!= 0)
4088 target
= change_address (target
, VOIDmode
,
4089 plus_constant (XEXP (target
, 0),
4090 bitpos
/ BITS_PER_UNIT
));
4092 emit_block_move (target
, temp
,
4093 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4097 return value_mode
== VOIDmode
? const0_rtx
: target
;
4100 /* Store the value in the bitfield. */
4101 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4102 if (value_mode
!= VOIDmode
)
4104 /* The caller wants an rtx for the value. */
4105 /* If possible, avoid refetching from the bitfield itself. */
4107 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4110 enum machine_mode tmode
;
4113 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
4114 tmode
= GET_MODE (temp
);
4115 if (tmode
== VOIDmode
)
4117 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4118 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4119 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4121 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4122 NULL_RTX
, value_mode
, 0, align
,
4129 rtx addr
= XEXP (target
, 0);
4132 /* If a value is wanted, it must be the lhs;
4133 so make the address stable for multiple use. */
4135 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
4136 && ! CONSTANT_ADDRESS_P (addr
)
4137 /* A frame-pointer reference is already stable. */
4138 && ! (GET_CODE (addr
) == PLUS
4139 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
4140 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
4141 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
4142 addr
= copy_to_reg (addr
);
4144 /* Now build a reference to just the desired component. */
4146 to_rtx
= change_address (target
, mode
,
4147 plus_constant (addr
, (bitpos
/ BITS_PER_UNIT
)));
4148 MEM_IN_STRUCT_P (to_rtx
) = 1;
4150 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
4154 /* Return true if any object containing the innermost array is an unaligned
4155 packed structure field. */
4158 get_inner_unaligned_p (exp
)
4161 int needed_alignment
= TYPE_ALIGN (TREE_TYPE (exp
));
4165 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4167 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4171 else if (TREE_CODE (exp
) != ARRAY_REF
4172 && TREE_CODE (exp
) != NON_LVALUE_EXPR
4173 && ! ((TREE_CODE (exp
) == NOP_EXPR
4174 || TREE_CODE (exp
) == CONVERT_EXPR
)
4175 && (TYPE_MODE (TREE_TYPE (exp
))
4176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4179 exp
= TREE_OPERAND (exp
, 0);
4185 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4186 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4187 ARRAY_REFs and find the ultimate containing object, which we return.
4189 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4190 bit position, and *PUNSIGNEDP to the signedness of the field.
4191 If the position of the field is variable, we store a tree
4192 giving the variable offset (in units) in *POFFSET.
4193 This offset is in addition to the bit position.
4194 If the position is not variable, we store 0 in *POFFSET.
4196 If any of the extraction expressions is volatile,
4197 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4199 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4200 is a mode that can be used to access the field. In that case, *PBITSIZE
4203 If the field describes a variable-sized object, *PMODE is set to
4204 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4205 this case, but the address of the object can be found. */
4208 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
4209 punsignedp
, pvolatilep
)
4214 enum machine_mode
*pmode
;
4218 tree orig_exp
= exp
;
4220 enum machine_mode mode
= VOIDmode
;
4221 tree offset
= integer_zero_node
;
4223 if (TREE_CODE (exp
) == COMPONENT_REF
)
4225 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
4226 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
4227 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
4228 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
4230 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
4232 size_tree
= TREE_OPERAND (exp
, 1);
4233 *punsignedp
= TREE_UNSIGNED (exp
);
4237 mode
= TYPE_MODE (TREE_TYPE (exp
));
4238 *pbitsize
= GET_MODE_BITSIZE (mode
);
4239 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
4244 if (TREE_CODE (size_tree
) != INTEGER_CST
)
4245 mode
= BLKmode
, *pbitsize
= -1;
4247 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
4250 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4251 and find the ultimate containing object. */
4257 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4259 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
4260 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
4261 : TREE_OPERAND (exp
, 2));
4262 tree constant
= integer_zero_node
, var
= pos
;
4264 /* If this field hasn't been filled in yet, don't go
4265 past it. This should only happen when folding expressions
4266 made during type construction. */
4270 /* Assume here that the offset is a multiple of a unit.
4271 If not, there should be an explicitly added constant. */
4272 if (TREE_CODE (pos
) == PLUS_EXPR
4273 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4274 constant
= TREE_OPERAND (pos
, 1), var
= TREE_OPERAND (pos
, 0);
4275 else if (TREE_CODE (pos
) == INTEGER_CST
)
4276 constant
= pos
, var
= integer_zero_node
;
4278 *pbitpos
+= TREE_INT_CST_LOW (constant
);
4279 offset
= size_binop (PLUS_EXPR
, offset
,
4280 size_binop (EXACT_DIV_EXPR
, var
,
4281 size_int (BITS_PER_UNIT
)));
4284 else if (TREE_CODE (exp
) == ARRAY_REF
)
4286 /* This code is based on the code in case ARRAY_REF in expand_expr
4287 below. We assume here that the size of an array element is
4288 always an integral multiple of BITS_PER_UNIT. */
4290 tree index
= TREE_OPERAND (exp
, 1);
4291 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4293 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4294 tree index_type
= TREE_TYPE (index
);
4296 if (! integer_zerop (low_bound
))
4297 index
= fold (build (MINUS_EXPR
, index_type
, index
, low_bound
));
4299 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4301 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4303 index_type
= TREE_TYPE (index
);
4306 index
= fold (build (MULT_EXPR
, index_type
, index
,
4307 TYPE_SIZE (TREE_TYPE (exp
))));
4309 if (TREE_CODE (index
) == INTEGER_CST
4310 && TREE_INT_CST_HIGH (index
) == 0)
4311 *pbitpos
+= TREE_INT_CST_LOW (index
);
4313 offset
= size_binop (PLUS_EXPR
, offset
,
4314 size_binop (FLOOR_DIV_EXPR
, index
,
4315 size_int (BITS_PER_UNIT
)));
4317 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
4318 && ! ((TREE_CODE (exp
) == NOP_EXPR
4319 || TREE_CODE (exp
) == CONVERT_EXPR
)
4320 && ! (TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
4321 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4323 && (TYPE_MODE (TREE_TYPE (exp
))
4324 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4327 /* If any reference in the chain is volatile, the effect is volatile. */
4328 if (TREE_THIS_VOLATILE (exp
))
4330 exp
= TREE_OPERAND (exp
, 0);
4333 if (integer_zerop (offset
))
4336 if (offset
!= 0 && contains_placeholder_p (offset
))
4337 offset
= build (WITH_RECORD_EXPR
, sizetype
, offset
, orig_exp
);
4344 /* Given an rtx VALUE that may contain additions and multiplications,
4345 return an equivalent value that just refers to a register or memory.
4346 This is done by generating instructions to perform the arithmetic
4347 and returning a pseudo-register containing the value.
4349 The returned value may be a REG, SUBREG, MEM or constant. */
4352 force_operand (value
, target
)
4355 register optab binoptab
= 0;
4356 /* Use a temporary to force order of execution of calls to
4360 /* Use subtarget as the target for operand 0 of a binary operation. */
4361 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
4363 if (GET_CODE (value
) == PLUS
)
4364 binoptab
= add_optab
;
4365 else if (GET_CODE (value
) == MINUS
)
4366 binoptab
= sub_optab
;
4367 else if (GET_CODE (value
) == MULT
)
4369 op2
= XEXP (value
, 1);
4370 if (!CONSTANT_P (op2
)
4371 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
4373 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4374 return expand_mult (GET_MODE (value
), tmp
,
4375 force_operand (op2
, NULL_RTX
),
4381 op2
= XEXP (value
, 1);
4382 if (!CONSTANT_P (op2
)
4383 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
4385 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
4387 binoptab
= add_optab
;
4388 op2
= negate_rtx (GET_MODE (value
), op2
);
4391 /* Check for an addition with OP2 a constant integer and our first
4392 operand a PLUS of a virtual register and something else. In that
4393 case, we want to emit the sum of the virtual register and the
4394 constant first and then add the other value. This allows virtual
4395 register instantiation to simply modify the constant rather than
4396 creating another one around this addition. */
4397 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
4398 && GET_CODE (XEXP (value
, 0)) == PLUS
4399 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
4400 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4401 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
4403 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
4404 XEXP (XEXP (value
, 0), 0), op2
,
4405 subtarget
, 0, OPTAB_LIB_WIDEN
);
4406 return expand_binop (GET_MODE (value
), binoptab
, temp
,
4407 force_operand (XEXP (XEXP (value
, 0), 1), 0),
4408 target
, 0, OPTAB_LIB_WIDEN
);
4411 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4412 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
4413 force_operand (op2
, NULL_RTX
),
4414 target
, 0, OPTAB_LIB_WIDEN
);
4415 /* We give UNSIGNEDP = 0 to expand_binop
4416 because the only operations we are expanding here are signed ones. */
4421 /* Subroutine of expand_expr:
4422 save the non-copied parts (LIST) of an expr (LHS), and return a list
4423 which can restore these values to their previous values,
4424 should something modify their storage. */
4427 save_noncopied_parts (lhs
, list
)
4434 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4435 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4436 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4439 tree part
= TREE_VALUE (tail
);
4440 tree part_type
= TREE_TYPE (part
);
4441 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
4442 rtx target
= assign_temp (part_type
, 0, 1, 1);
4443 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
4444 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
4445 parts
= tree_cons (to_be_saved
,
4446 build (RTL_EXPR
, part_type
, NULL_TREE
,
4449 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
4454 /* Subroutine of expand_expr:
4455 record the non-copied parts (LIST) of an expr (LHS), and return a list
4456 which specifies the initial values of these parts. */
4459 init_noncopied_parts (lhs
, list
)
4466 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4467 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4468 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4471 tree part
= TREE_VALUE (tail
);
4472 tree part_type
= TREE_TYPE (part
);
4473 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
4474 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
4479 /* Subroutine of expand_expr: return nonzero iff there is no way that
4480 EXP can reference X, which is being modified. */
4483 safe_from_p (x
, exp
)
4491 /* If EXP has varying size, we MUST use a target since we currently
4492 have no way of allocating temporaries of variable size
4493 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4494 So we assume here that something at a higher level has prevented a
4495 clash. This is somewhat bogus, but the best we can do. Only
4496 do this when X is BLKmode. */
4497 || (TREE_TYPE (exp
) != 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4498 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
4499 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
4500 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
4501 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
4503 && GET_MODE (x
) == BLKmode
))
4506 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4507 find the underlying pseudo. */
4508 if (GET_CODE (x
) == SUBREG
)
4511 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4515 /* If X is a location in the outgoing argument area, it is always safe. */
4516 if (GET_CODE (x
) == MEM
4517 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
4518 || (GET_CODE (XEXP (x
, 0)) == PLUS
4519 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
4522 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4525 exp_rtl
= DECL_RTL (exp
);
4532 if (TREE_CODE (exp
) == TREE_LIST
)
4533 return ((TREE_VALUE (exp
) == 0
4534 || safe_from_p (x
, TREE_VALUE (exp
)))
4535 && (TREE_CHAIN (exp
) == 0
4536 || safe_from_p (x
, TREE_CHAIN (exp
))));
4541 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4545 return (safe_from_p (x
, TREE_OPERAND (exp
, 0))
4546 && safe_from_p (x
, TREE_OPERAND (exp
, 1)));
4550 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4551 the expression. If it is set, we conflict iff we are that rtx or
4552 both are in memory. Otherwise, we check all operands of the
4553 expression recursively. */
4555 switch (TREE_CODE (exp
))
4558 return (staticp (TREE_OPERAND (exp
, 0))
4559 || safe_from_p (x
, TREE_OPERAND (exp
, 0)));
4562 if (GET_CODE (x
) == MEM
)
4567 exp_rtl
= CALL_EXPR_RTL (exp
);
4570 /* Assume that the call will clobber all hard registers and
4572 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4573 || GET_CODE (x
) == MEM
)
4580 /* If a sequence exists, we would have to scan every instruction
4581 in the sequence to see if it was safe. This is probably not
4583 if (RTL_EXPR_SEQUENCE (exp
))
4586 exp_rtl
= RTL_EXPR_RTL (exp
);
4589 case WITH_CLEANUP_EXPR
:
4590 exp_rtl
= RTL_EXPR_RTL (exp
);
4593 case CLEANUP_POINT_EXPR
:
4594 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4597 exp_rtl
= SAVE_EXPR_RTL (exp
);
4601 /* The only operand we look at is operand 1. The rest aren't
4602 part of the expression. */
4603 return safe_from_p (x
, TREE_OPERAND (exp
, 1));
4605 case METHOD_CALL_EXPR
:
4606 /* This takes a rtx argument, but shouldn't appear here. */
4610 /* If we have an rtx, we do not need to scan our operands. */
4614 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
4615 for (i
= 0; i
< nops
; i
++)
4616 if (TREE_OPERAND (exp
, i
) != 0
4617 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
)))
4621 /* If we have an rtl, find any enclosed object. Then see if we conflict
4625 if (GET_CODE (exp_rtl
) == SUBREG
)
4627 exp_rtl
= SUBREG_REG (exp_rtl
);
4628 if (GET_CODE (exp_rtl
) == REG
4629 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
4633 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4634 are memory and EXP is not readonly. */
4635 return ! (rtx_equal_p (x
, exp_rtl
)
4636 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
4637 && ! TREE_READONLY (exp
)));
4640 /* If we reach here, it is safe. */
4644 /* Subroutine of expand_expr: return nonzero iff EXP is an
4645 expression whose type is statically determinable. */
4651 if (TREE_CODE (exp
) == PARM_DECL
4652 || TREE_CODE (exp
) == VAR_DECL
4653 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
4654 || TREE_CODE (exp
) == COMPONENT_REF
4655 || TREE_CODE (exp
) == ARRAY_REF
)
4660 /* Subroutine of expand_expr: return rtx if EXP is a
4661 variable or parameter; else return 0. */
4668 switch (TREE_CODE (exp
))
4672 return DECL_RTL (exp
);
4678 /* expand_expr: generate code for computing expression EXP.
4679 An rtx for the computed value is returned. The value is never null.
4680 In the case of a void EXP, const0_rtx is returned.
4682 The value may be stored in TARGET if TARGET is nonzero.
4683 TARGET is just a suggestion; callers must assume that
4684 the rtx returned may not be the same as TARGET.
4686 If TARGET is CONST0_RTX, it means that the value will be ignored.
4688 If TMODE is not VOIDmode, it suggests generating the
4689 result in mode TMODE. But this is done only when convenient.
4690 Otherwise, TMODE is ignored and the value generated in its natural mode.
4691 TMODE is just a suggestion; callers must assume that
4692 the rtx returned may not have mode TMODE.
4694 Note that TARGET may have neither TMODE nor MODE. In that case, it
4695 probably will not be used.
4697 If MODIFIER is EXPAND_SUM then when EXP is an addition
4698 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4699 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4700 products as above, or REG or MEM, or constant.
4701 Ordinarily in such cases we would output mul or add instructions
4702 and then return a pseudo reg containing the sum.
4704 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4705 it also marks a label as absolutely required (it can't be dead).
4706 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4707 This is used for outputting expressions used in initializers.
4709 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4710 with a constant address even if that address is not normally legitimate.
4711 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4714 expand_expr (exp
, target
, tmode
, modifier
)
4717 enum machine_mode tmode
;
4718 enum expand_modifier modifier
;
4720 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4721 This is static so it will be accessible to our recursive callees. */
4722 static tree placeholder_list
= 0;
4723 register rtx op0
, op1
, temp
;
4724 tree type
= TREE_TYPE (exp
);
4725 int unsignedp
= TREE_UNSIGNED (type
);
4726 register enum machine_mode mode
= TYPE_MODE (type
);
4727 register enum tree_code code
= TREE_CODE (exp
);
4729 /* Use subtarget as the target for operand 0 of a binary operation. */
4730 rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
4731 rtx original_target
= target
;
4732 /* Maybe defer this until sure not doing bytecode? */
4733 int ignore
= (target
== const0_rtx
4734 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
4735 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
4736 || code
== COND_EXPR
)
4737 && TREE_CODE (type
) == VOID_TYPE
));
4741 if (output_bytecode
&& modifier
!= EXPAND_INITIALIZER
)
4743 bc_expand_expr (exp
);
4747 /* Don't use hard regs as subtargets, because the combiner
4748 can only handle pseudo regs. */
4749 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
4751 /* Avoid subtargets inside loops,
4752 since they hide some invariant expressions. */
4753 if (preserve_subexpressions_p ())
4756 /* If we are going to ignore this result, we need only do something
4757 if there is a side-effect somewhere in the expression. If there
4758 is, short-circuit the most common cases here. Note that we must
4759 not call expand_expr with anything but const0_rtx in case this
4760 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4764 if (! TREE_SIDE_EFFECTS (exp
))
4767 /* Ensure we reference a volatile object even if value is ignored. */
4768 if (TREE_THIS_VOLATILE (exp
)
4769 && TREE_CODE (exp
) != FUNCTION_DECL
4770 && mode
!= VOIDmode
&& mode
!= BLKmode
)
4772 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
4773 if (GET_CODE (temp
) == MEM
)
4774 temp
= copy_to_reg (temp
);
4778 if (TREE_CODE_CLASS (code
) == '1')
4779 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4780 VOIDmode
, modifier
);
4781 else if (TREE_CODE_CLASS (code
) == '2'
4782 || TREE_CODE_CLASS (code
) == '<')
4784 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
4785 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
4788 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
4789 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
4790 /* If the second operand has no side effects, just evaluate
4792 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4793 VOIDmode
, modifier
);
4798 /* If will do cse, generate all results into pseudo registers
4799 since 1) that allows cse to find more things
4800 and 2) otherwise cse could produce an insn the machine
4803 if (! cse_not_expected
&& mode
!= BLKmode
&& target
4804 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4811 tree function
= decl_function_context (exp
);
4812 /* Handle using a label in a containing function. */
4813 if (function
!= current_function_decl
&& function
!= 0)
4815 struct function
*p
= find_function_data (function
);
4816 /* Allocate in the memory associated with the function
4817 that the label is in. */
4818 push_obstacks (p
->function_obstack
,
4819 p
->function_maybepermanent_obstack
);
4821 p
->forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4822 label_rtx (exp
), p
->forced_labels
);
4825 else if (modifier
== EXPAND_INITIALIZER
)
4826 forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4827 label_rtx (exp
), forced_labels
);
4828 temp
= gen_rtx (MEM
, FUNCTION_MODE
,
4829 gen_rtx (LABEL_REF
, Pmode
, label_rtx (exp
)));
4830 if (function
!= current_function_decl
&& function
!= 0)
4831 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
4836 if (DECL_RTL (exp
) == 0)
4838 error_with_decl (exp
, "prior parameter's size depends on `%s'");
4839 return CONST0_RTX (mode
);
4842 /* ... fall through ... */
4845 /* If a static var's type was incomplete when the decl was written,
4846 but the type is complete now, lay out the decl now. */
4847 if (DECL_SIZE (exp
) == 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4848 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
4850 push_obstacks_nochange ();
4851 end_temporary_allocation ();
4852 layout_decl (exp
, 0);
4853 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
4857 /* ... fall through ... */
4861 if (DECL_RTL (exp
) == 0)
4864 /* Ensure variable marked as used even if it doesn't go through
4865 a parser. If it hasn't be used yet, write out an external
4867 if (! TREE_USED (exp
))
4869 assemble_external (exp
);
4870 TREE_USED (exp
) = 1;
4873 /* Show we haven't gotten RTL for this yet. */
4876 /* Handle variables inherited from containing functions. */
4877 context
= decl_function_context (exp
);
4879 /* We treat inline_function_decl as an alias for the current function
4880 because that is the inline function whose vars, types, etc.
4881 are being merged into the current function.
4882 See expand_inline_function. */
4884 if (context
!= 0 && context
!= current_function_decl
4885 && context
!= inline_function_decl
4886 /* If var is static, we don't need a static chain to access it. */
4887 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
4888 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
4892 /* Mark as non-local and addressable. */
4893 DECL_NONLOCAL (exp
) = 1;
4894 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
4896 mark_addressable (exp
);
4897 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
4899 addr
= XEXP (DECL_RTL (exp
), 0);
4900 if (GET_CODE (addr
) == MEM
)
4901 addr
= gen_rtx (MEM
, Pmode
,
4902 fix_lexical_addr (XEXP (addr
, 0), exp
));
4904 addr
= fix_lexical_addr (addr
, exp
);
4905 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
4908 /* This is the case of an array whose size is to be determined
4909 from its initializer, while the initializer is still being parsed.
4912 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4913 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
4914 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
4915 XEXP (DECL_RTL (exp
), 0));
4917 /* If DECL_RTL is memory, we are in the normal case and either
4918 the address is not valid or it is not a register and -fforce-addr
4919 is specified, get the address into a register. */
4921 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4922 && modifier
!= EXPAND_CONST_ADDRESS
4923 && modifier
!= EXPAND_SUM
4924 && modifier
!= EXPAND_INITIALIZER
4925 && (! memory_address_p (DECL_MODE (exp
),
4926 XEXP (DECL_RTL (exp
), 0))
4928 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
4929 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
4930 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
4932 /* If we got something, return it. But first, set the alignment
4933 the address is a register. */
4936 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
4937 mark_reg_pointer (XEXP (temp
, 0),
4938 DECL_ALIGN (exp
) / BITS_PER_UNIT
);
4943 /* If the mode of DECL_RTL does not match that of the decl, it
4944 must be a promoted value. We return a SUBREG of the wanted mode,
4945 but mark it so that we know that it was already extended. */
4947 if (GET_CODE (DECL_RTL (exp
)) == REG
4948 && GET_MODE (DECL_RTL (exp
)) != mode
)
4950 /* Get the signedness used for this variable. Ensure we get the
4951 same mode we got when the variable was declared. */
4952 if (GET_MODE (DECL_RTL (exp
))
4953 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
4956 temp
= gen_rtx (SUBREG
, mode
, DECL_RTL (exp
), 0);
4957 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4958 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4962 return DECL_RTL (exp
);
4965 return immed_double_const (TREE_INT_CST_LOW (exp
),
4966 TREE_INT_CST_HIGH (exp
),
4970 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
4973 /* If optimized, generate immediate CONST_DOUBLE
4974 which will be turned into memory by reload if necessary.
4976 We used to force a register so that loop.c could see it. But
4977 this does not allow gen_* patterns to perform optimizations with
4978 the constants. It also produces two insns in cases like "x = 1.0;".
4979 On most machines, floating-point constants are not permitted in
4980 many insns, so we'd end up copying it to a register in any case.
4982 Now, we do the copying in expand_binop, if appropriate. */
4983 return immed_real_const (exp
);
4987 if (! TREE_CST_RTL (exp
))
4988 output_constant_def (exp
);
4990 /* TREE_CST_RTL probably contains a constant address.
4991 On RISC machines where a constant address isn't valid,
4992 make some insns to get that address into a register. */
4993 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
4994 && modifier
!= EXPAND_CONST_ADDRESS
4995 && modifier
!= EXPAND_INITIALIZER
4996 && modifier
!= EXPAND_SUM
4997 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
4999 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
5000 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
5001 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
5002 return TREE_CST_RTL (exp
);
5005 context
= decl_function_context (exp
);
5007 /* We treat inline_function_decl as an alias for the current function
5008 because that is the inline function whose vars, types, etc.
5009 are being merged into the current function.
5010 See expand_inline_function. */
5011 if (context
== current_function_decl
|| context
== inline_function_decl
)
5014 /* If this is non-local, handle it. */
5017 temp
= SAVE_EXPR_RTL (exp
);
5018 if (temp
&& GET_CODE (temp
) == REG
)
5020 put_var_into_stack (exp
);
5021 temp
= SAVE_EXPR_RTL (exp
);
5023 if (temp
== 0 || GET_CODE (temp
) != MEM
)
5025 return change_address (temp
, mode
,
5026 fix_lexical_addr (XEXP (temp
, 0), exp
));
5028 if (SAVE_EXPR_RTL (exp
) == 0)
5030 if (mode
== VOIDmode
)
5033 temp
= assign_temp (type
, 0, 0, 0);
5035 SAVE_EXPR_RTL (exp
) = temp
;
5036 if (!optimize
&& GET_CODE (temp
) == REG
)
5037 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, temp
,
5040 /* If the mode of TEMP does not match that of the expression, it
5041 must be a promoted value. We pass store_expr a SUBREG of the
5042 wanted mode but mark it so that we know that it was already
5043 extended. Note that `unsignedp' was modified above in
5046 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
5048 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5049 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5050 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5053 if (temp
== const0_rtx
)
5054 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
5056 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
5059 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5060 must be a promoted value. We return a SUBREG of the wanted mode,
5061 but mark it so that we know that it was already extended. */
5063 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
5064 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
5066 /* Compute the signedness and make the proper SUBREG. */
5067 promote_mode (type
, mode
, &unsignedp
, 0);
5068 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5069 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5070 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5074 return SAVE_EXPR_RTL (exp
);
5079 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5080 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
5084 case PLACEHOLDER_EXPR
:
5085 /* If there is an object on the head of the placeholder list,
5086 see if some object in it's references is of type TYPE. For
5087 further information, see tree.def. */
5088 if (placeholder_list
)
5091 tree old_list
= placeholder_list
;
5093 for (object
= TREE_PURPOSE (placeholder_list
);
5094 (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
5095 != TYPE_MAIN_VARIANT (type
))
5096 && (TREE_CODE_CLASS (TREE_CODE (object
)) == 'r'
5097 || TREE_CODE_CLASS (TREE_CODE (object
)) == '1'
5098 || TREE_CODE_CLASS (TREE_CODE (object
)) == '2'
5099 || TREE_CODE_CLASS (TREE_CODE (object
)) == 'e');
5100 object
= TREE_OPERAND (object
, 0))
5104 && (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
5105 == TYPE_MAIN_VARIANT (type
)))
5107 /* Expand this object skipping the list entries before
5108 it was found in case it is also a PLACEHOLDER_EXPR.
5109 In that case, we want to translate it using subsequent
5111 placeholder_list
= TREE_CHAIN (placeholder_list
);
5112 temp
= expand_expr (object
, original_target
, tmode
, modifier
);
5113 placeholder_list
= old_list
;
5118 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5121 case WITH_RECORD_EXPR
:
5122 /* Put the object on the placeholder list, expand our first operand,
5123 and pop the list. */
5124 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
5126 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
5128 placeholder_list
= TREE_CHAIN (placeholder_list
);
5132 expand_exit_loop_if_false (NULL_PTR
,
5133 invert_truthvalue (TREE_OPERAND (exp
, 0)));
5138 expand_start_loop (1);
5139 expand_expr_stmt (TREE_OPERAND (exp
, 0));
5147 tree vars
= TREE_OPERAND (exp
, 0);
5148 int vars_need_expansion
= 0;
5150 /* Need to open a binding contour here because
5151 if there are any cleanups they most be contained here. */
5152 expand_start_bindings (0);
5154 /* Mark the corresponding BLOCK for output in its proper place. */
5155 if (TREE_OPERAND (exp
, 2) != 0
5156 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
5157 insert_block (TREE_OPERAND (exp
, 2));
5159 /* If VARS have not yet been expanded, expand them now. */
5162 if (DECL_RTL (vars
) == 0)
5164 vars_need_expansion
= 1;
5167 expand_decl_init (vars
);
5168 vars
= TREE_CHAIN (vars
);
5171 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
5173 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
5179 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
5181 emit_insns (RTL_EXPR_SEQUENCE (exp
));
5182 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
5183 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
5184 free_temps_for_rtl_expr (exp
);
5185 return RTL_EXPR_RTL (exp
);
5188 /* If we don't need the result, just ensure we evaluate any
5193 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
5194 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
5198 /* All elts simple constants => refer to a constant in memory. But
5199 if this is a non-BLKmode mode, let it store a field at a time
5200 since that should make a CONST_INT or CONST_DOUBLE when we
5201 fold. Likewise, if we have a target we can use, it is best to
5202 store directly into the target unless the type is large enough
5203 that memcpy will be used. If we are making an initializer and
5204 all operands are constant, put it in memory as well. */
5205 else if ((TREE_STATIC (exp
)
5206 && ((mode
== BLKmode
5207 && ! (target
!= 0 && safe_from_p (target
, exp
)))
5208 || TREE_ADDRESSABLE (exp
)
5209 || (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5210 && (move_by_pieces_ninsns
5211 (TREE_INT_CST_LOW (TYPE_SIZE (type
))/BITS_PER_UNIT
,
5212 TYPE_ALIGN (type
) / BITS_PER_UNIT
)
5214 && ! mostly_zeros_p (exp
))))
5215 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
5217 rtx constructor
= output_constant_def (exp
);
5218 if (modifier
!= EXPAND_CONST_ADDRESS
5219 && modifier
!= EXPAND_INITIALIZER
5220 && modifier
!= EXPAND_SUM
5221 && (! memory_address_p (GET_MODE (constructor
),
5222 XEXP (constructor
, 0))
5224 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
5225 constructor
= change_address (constructor
, VOIDmode
,
5226 XEXP (constructor
, 0));
5232 /* Handle calls that pass values in multiple non-contiguous
5233 locations. The Irix 6 ABI has examples of this. */
5234 if (target
== 0 || ! safe_from_p (target
, exp
)
5235 || GET_CODE (target
) == PARALLEL
)
5237 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
5238 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5240 target
= assign_temp (type
, 0, 1, 1);
5243 if (TREE_READONLY (exp
))
5245 if (GET_CODE (target
) == MEM
)
5246 target
= change_address (target
, GET_MODE (target
),
5248 RTX_UNCHANGING_P (target
) = 1;
5251 store_constructor (exp
, target
, 0);
5257 tree exp1
= TREE_OPERAND (exp
, 0);
5260 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
5261 op0
= memory_address (mode
, op0
);
5263 temp
= gen_rtx (MEM
, mode
, op0
);
5264 /* If address was computed by addition,
5265 mark this as an element of an aggregate. */
5266 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5267 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
5268 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == PLUS_EXPR
)
5269 || AGGREGATE_TYPE_P (TREE_TYPE (exp
))
5270 || (TREE_CODE (exp1
) == ADDR_EXPR
5271 && (exp2
= TREE_OPERAND (exp1
, 0))
5272 && AGGREGATE_TYPE_P (TREE_TYPE (exp2
))))
5273 MEM_IN_STRUCT_P (temp
) = 1;
5274 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) | flag_volatile
;
5276 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5277 here, because, in C and C++, the fact that a location is accessed
5278 through a pointer to const does not mean that the value there can
5279 never change. Languages where it can never change should
5280 also set TREE_STATIC. */
5281 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
5286 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
5290 tree array
= TREE_OPERAND (exp
, 0);
5291 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5292 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
5293 tree index
= TREE_OPERAND (exp
, 1);
5294 tree index_type
= TREE_TYPE (index
);
5297 if (TREE_CODE (low_bound
) != INTEGER_CST
5298 && contains_placeholder_p (low_bound
))
5299 low_bound
= build (WITH_RECORD_EXPR
, sizetype
, low_bound
, exp
);
5301 /* Optimize the special-case of a zero lower bound.
5303 We convert the low_bound to sizetype to avoid some problems
5304 with constant folding. (E.g. suppose the lower bound is 1,
5305 and its mode is QI. Without the conversion, (ARRAY
5306 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5307 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5309 But sizetype isn't quite right either (especially if
5310 the lowbound is negative). FIXME */
5312 if (! integer_zerop (low_bound
))
5313 index
= fold (build (MINUS_EXPR
, index_type
, index
,
5314 convert (sizetype
, low_bound
)));
5316 if ((TREE_CODE (index
) != INTEGER_CST
5317 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5318 && (! SLOW_UNALIGNED_ACCESS
|| ! get_inner_unaligned_p (exp
)))
5320 /* Nonconstant array index or nonconstant element size, and
5321 not an array in an unaligned (packed) structure field.
5322 Generate the tree for *(&array+index) and expand that,
5323 except do it in a language-independent way
5324 and don't complain about non-lvalue arrays.
5325 `mark_addressable' should already have been called
5326 for any array for which this case will be reached. */
5328 /* Don't forget the const or volatile flag from the array
5330 tree variant_type
= build_type_variant (type
,
5331 TREE_READONLY (exp
),
5332 TREE_THIS_VOLATILE (exp
));
5333 tree array_adr
= build1 (ADDR_EXPR
,
5334 build_pointer_type (variant_type
), array
);
5336 tree size
= size_in_bytes (type
);
5338 /* Convert the integer argument to a type the same size as sizetype
5339 so the multiply won't overflow spuriously. */
5340 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
5341 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
5344 if (TREE_CODE (size
) != INTEGER_CST
5345 && contains_placeholder_p (size
))
5346 size
= build (WITH_RECORD_EXPR
, sizetype
, size
, exp
);
5348 /* Don't think the address has side effects
5349 just because the array does.
5350 (In some cases the address might have side effects,
5351 and we fail to record that fact here. However, it should not
5352 matter, since expand_expr should not care.) */
5353 TREE_SIDE_EFFECTS (array_adr
) = 0;
5357 (INDIRECT_REF
, type
,
5358 fold (build (PLUS_EXPR
,
5359 TYPE_POINTER_TO (variant_type
),
5364 TYPE_POINTER_TO (variant_type
),
5365 fold (build (MULT_EXPR
, TREE_TYPE (index
),
5367 convert (TREE_TYPE (index
),
5370 /* Volatility, etc., of new expression is same as old
5372 TREE_SIDE_EFFECTS (elt
) = TREE_SIDE_EFFECTS (exp
);
5373 TREE_THIS_VOLATILE (elt
) = TREE_THIS_VOLATILE (exp
);
5374 TREE_READONLY (elt
) = TREE_READONLY (exp
);
5376 return expand_expr (elt
, target
, tmode
, modifier
);
5379 /* Fold an expression like: "foo"[2].
5380 This is not done in fold so it won't happen inside &.
5381 Don't fold if this is for wide characters since it's too
5382 difficult to do correctly and this is a very rare case. */
5384 if (TREE_CODE (array
) == STRING_CST
5385 && TREE_CODE (index
) == INTEGER_CST
5386 && !TREE_INT_CST_HIGH (index
)
5387 && (i
= TREE_INT_CST_LOW (index
)) < TREE_STRING_LENGTH (array
)
5388 && GET_MODE_CLASS (mode
) == MODE_INT
5389 && GET_MODE_SIZE (mode
) == 1)
5390 return GEN_INT (TREE_STRING_POINTER (array
)[i
]);
5392 /* If this is a constant index into a constant array,
5393 just get the value from the array. Handle both the cases when
5394 we have an explicit constructor and when our operand is a variable
5395 that was declared const. */
5397 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
5399 if (TREE_CODE (index
) == INTEGER_CST
5400 && TREE_INT_CST_HIGH (index
) == 0)
5402 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
5404 i
= TREE_INT_CST_LOW (index
);
5406 elem
= TREE_CHAIN (elem
);
5408 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5413 else if (optimize
>= 1
5414 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
5415 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
5416 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
5418 if (TREE_CODE (index
) == INTEGER_CST
5419 && TREE_INT_CST_HIGH (index
) == 0)
5421 tree init
= DECL_INITIAL (array
);
5423 i
= TREE_INT_CST_LOW (index
);
5424 if (TREE_CODE (init
) == CONSTRUCTOR
)
5426 tree elem
= CONSTRUCTOR_ELTS (init
);
5429 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
))
5430 elem
= TREE_CHAIN (elem
);
5432 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5435 else if (TREE_CODE (init
) == STRING_CST
5436 && i
< TREE_STRING_LENGTH (init
))
5437 return GEN_INT (TREE_STRING_POINTER (init
)[i
]);
5442 /* Treat array-ref with constant index as a component-ref. */
5446 /* If the operand is a CONSTRUCTOR, we can just extract the
5447 appropriate field if it is present. Don't do this if we have
5448 already written the data since we want to refer to that copy
5449 and varasm.c assumes that's what we'll do. */
5450 if (code
!= ARRAY_REF
5451 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
5452 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
5456 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
5457 elt
= TREE_CHAIN (elt
))
5458 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
5459 return expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
5463 enum machine_mode mode1
;
5468 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
5469 &mode1
, &unsignedp
, &volatilep
);
5472 /* If we got back the original object, something is wrong. Perhaps
5473 we are evaluating an expression too early. In any event, don't
5474 infinitely recurse. */
5478 /* If TEM's type is a union of variable size, pass TARGET to the inner
5479 computation, since it will need a temporary and TARGET is known
5480 to have to do. This occurs in unchecked conversion in Ada. */
5482 op0
= expand_expr (tem
,
5483 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
5484 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
5486 ? target
: NULL_RTX
),
5488 modifier
== EXPAND_INITIALIZER
? modifier
: 0);
5490 /* If this is a constant, put it into a register if it is a
5491 legitimate constant and memory if it isn't. */
5492 if (CONSTANT_P (op0
))
5494 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
5495 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
))
5496 op0
= force_reg (mode
, op0
);
5498 op0
= validize_mem (force_const_mem (mode
, op0
));
5501 alignment
= TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
;
5504 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
5506 if (GET_CODE (op0
) != MEM
)
5508 op0
= change_address (op0
, VOIDmode
,
5509 gen_rtx (PLUS
, ptr_mode
, XEXP (op0
, 0),
5510 force_reg (ptr_mode
, offset_rtx
)));
5511 /* If we have a variable offset, the known alignment
5512 is only that of the innermost structure containing the field.
5513 (Actually, we could sometimes do better by using the
5514 size of an element of the innermost array, but no need.) */
5515 if (TREE_CODE (exp
) == COMPONENT_REF
5516 || TREE_CODE (exp
) == BIT_FIELD_REF
)
5517 alignment
= (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5521 /* Don't forget about volatility even if this is a bitfield. */
5522 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
5524 op0
= copy_rtx (op0
);
5525 MEM_VOLATILE_P (op0
) = 1;
5528 /* In cases where an aligned union has an unaligned object
5529 as a field, we might be extracting a BLKmode value from
5530 an integer-mode (e.g., SImode) object. Handle this case
5531 by doing the extract into an object as wide as the field
5532 (which we know to be the width of a basic mode), then
5533 storing into memory, and changing the mode to BLKmode.
5534 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5535 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5536 if (mode1
== VOIDmode
5537 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
5538 || (modifier
!= EXPAND_CONST_ADDRESS
5539 && modifier
!= EXPAND_INITIALIZER
5540 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
5541 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5542 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5543 /* If the field isn't aligned enough to fetch as a memref,
5544 fetch it as a bit field. */
5545 || (SLOW_UNALIGNED_ACCESS
5546 && ((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
))
5547 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))))))
5549 enum machine_mode ext_mode
= mode
;
5551 if (ext_mode
== BLKmode
)
5552 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
5554 if (ext_mode
== BLKmode
)
5556 /* In this case, BITPOS must start at a byte boundary and
5557 TARGET, if specified, must be a MEM. */
5558 if (GET_CODE (op0
) != MEM
5559 || (target
!= 0 && GET_CODE (target
) != MEM
)
5560 || bitpos
% BITS_PER_UNIT
!= 0)
5563 op0
= change_address (op0
, VOIDmode
,
5564 plus_constant (XEXP (op0
, 0),
5565 bitpos
/ BITS_PER_UNIT
));
5567 target
= assign_temp (type
, 0, 1, 1);
5569 emit_block_move (target
, op0
,
5570 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5577 op0
= validize_mem (op0
);
5579 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
5580 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5582 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
5583 unsignedp
, target
, ext_mode
, ext_mode
,
5585 int_size_in_bytes (TREE_TYPE (tem
)));
5587 /* If the result is a record type and BITSIZE is narrower than
5588 the mode of OP0, an integral mode, and this is a big endian
5589 machine, we must put the field into the high-order bits. */
5590 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
5591 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
5592 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
5593 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
5594 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
5598 if (mode
== BLKmode
)
5600 rtx
new = assign_stack_temp (ext_mode
,
5601 bitsize
/ BITS_PER_UNIT
, 0);
5603 emit_move_insn (new, op0
);
5604 op0
= copy_rtx (new);
5605 PUT_MODE (op0
, BLKmode
);
5606 MEM_IN_STRUCT_P (op0
) = 1;
5612 /* If the result is BLKmode, use that to access the object
5614 if (mode
== BLKmode
)
5617 /* Get a reference to just this component. */
5618 if (modifier
== EXPAND_CONST_ADDRESS
5619 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
5620 op0
= gen_rtx (MEM
, mode1
, plus_constant (XEXP (op0
, 0),
5621 (bitpos
/ BITS_PER_UNIT
)));
5623 op0
= change_address (op0
, mode1
,
5624 plus_constant (XEXP (op0
, 0),
5625 (bitpos
/ BITS_PER_UNIT
)));
5626 if (GET_CODE (XEXP (op0
, 0)) == REG
)
5627 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5629 MEM_IN_STRUCT_P (op0
) = 1;
5630 MEM_VOLATILE_P (op0
) |= volatilep
;
5631 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
)
5634 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5635 convert_move (target
, op0
, unsignedp
);
5639 /* Intended for a reference to a buffer of a file-object in Pascal.
5640 But it's not certain that a special tree code will really be
5641 necessary for these. INDIRECT_REF might work for them. */
5647 /* Pascal set IN expression.
5650 rlo = set_low - (set_low%bits_per_word);
5651 the_word = set [ (index - rlo)/bits_per_word ];
5652 bit_index = index % bits_per_word;
5653 bitmask = 1 << bit_index;
5654 return !!(the_word & bitmask); */
5656 tree set
= TREE_OPERAND (exp
, 0);
5657 tree index
= TREE_OPERAND (exp
, 1);
5658 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
5659 tree set_type
= TREE_TYPE (set
);
5660 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
5661 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
5662 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
5663 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
5664 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
5665 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
5666 rtx setaddr
= XEXP (setval
, 0);
5667 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
5669 rtx diff
, quo
, rem
, addr
, bit
, result
;
5671 preexpand_calls (exp
);
5673 /* If domain is empty, answer is no. Likewise if index is constant
5674 and out of bounds. */
5675 if ((TREE_CODE (set_high_bound
) == INTEGER_CST
5676 && TREE_CODE (set_low_bound
) == INTEGER_CST
5677 && tree_int_cst_lt (set_high_bound
, set_low_bound
)
5678 || (TREE_CODE (index
) == INTEGER_CST
5679 && TREE_CODE (set_low_bound
) == INTEGER_CST
5680 && tree_int_cst_lt (index
, set_low_bound
))
5681 || (TREE_CODE (set_high_bound
) == INTEGER_CST
5682 && TREE_CODE (index
) == INTEGER_CST
5683 && tree_int_cst_lt (set_high_bound
, index
))))
5687 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5689 /* If we get here, we have to generate the code for both cases
5690 (in range and out of range). */
5692 op0
= gen_label_rtx ();
5693 op1
= gen_label_rtx ();
5695 if (! (GET_CODE (index_val
) == CONST_INT
5696 && GET_CODE (lo_r
) == CONST_INT
))
5698 emit_cmp_insn (index_val
, lo_r
, LT
, NULL_RTX
,
5699 GET_MODE (index_val
), iunsignedp
, 0);
5700 emit_jump_insn (gen_blt (op1
));
5703 if (! (GET_CODE (index_val
) == CONST_INT
5704 && GET_CODE (hi_r
) == CONST_INT
))
5706 emit_cmp_insn (index_val
, hi_r
, GT
, NULL_RTX
,
5707 GET_MODE (index_val
), iunsignedp
, 0);
5708 emit_jump_insn (gen_bgt (op1
));
5711 /* Calculate the element number of bit zero in the first word
5713 if (GET_CODE (lo_r
) == CONST_INT
)
5714 rlow
= GEN_INT (INTVAL (lo_r
)
5715 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
5717 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
5718 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
5719 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5721 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
5722 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5724 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
5725 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5726 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
5727 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5729 addr
= memory_address (byte_mode
,
5730 expand_binop (index_mode
, add_optab
, diff
,
5731 setaddr
, NULL_RTX
, iunsignedp
,
5734 /* Extract the bit we want to examine */
5735 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
5736 gen_rtx (MEM
, byte_mode
, addr
),
5737 make_tree (TREE_TYPE (index
), rem
),
5739 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
5740 GET_MODE (target
) == byte_mode
? target
: 0,
5741 1, OPTAB_LIB_WIDEN
);
5743 if (result
!= target
)
5744 convert_move (target
, result
, 1);
5746 /* Output the code to handle the out-of-range case. */
5749 emit_move_insn (target
, const0_rtx
);
5754 case WITH_CLEANUP_EXPR
:
5755 if (RTL_EXPR_RTL (exp
) == 0)
5758 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5760 = tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2), cleanups_this_call
);
5761 /* That's it for this cleanup. */
5762 TREE_OPERAND (exp
, 2) = 0;
5763 expand_eh_region_start ();
5765 return RTL_EXPR_RTL (exp
);
5767 case CLEANUP_POINT_EXPR
:
5769 extern int temp_slot_level
;
5770 tree old_cleanups
= cleanups_this_call
;
5771 int old_temp_level
= target_temp_slot_level
;
5773 target_temp_slot_level
= temp_slot_level
;
5774 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5775 /* If we're going to use this value, load it up now. */
5777 op0
= force_not_mem (op0
);
5778 expand_cleanups_to (old_cleanups
);
5779 preserve_temp_slots (op0
);
5782 target_temp_slot_level
= old_temp_level
;
5787 /* Check for a built-in function. */
5788 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5789 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5791 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5792 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
5794 /* If this call was expanded already by preexpand_calls,
5795 just return the result we got. */
5796 if (CALL_EXPR_RTL (exp
) != 0)
5797 return CALL_EXPR_RTL (exp
);
5799 return expand_call (exp
, target
, ignore
);
5801 case NON_LVALUE_EXPR
:
5804 case REFERENCE_EXPR
:
5805 if (TREE_CODE (type
) == UNION_TYPE
)
5807 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
5810 if (mode
!= BLKmode
)
5811 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5813 target
= assign_temp (type
, 0, 1, 1);
5816 if (GET_CODE (target
) == MEM
)
5817 /* Store data into beginning of memory target. */
5818 store_expr (TREE_OPERAND (exp
, 0),
5819 change_address (target
, TYPE_MODE (valtype
), 0), 0);
5821 else if (GET_CODE (target
) == REG
)
5822 /* Store this field into a union of the proper type. */
5823 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
5824 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
5826 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5830 /* Return the entire union. */
5834 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5836 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
5839 /* If the signedness of the conversion differs and OP0 is
5840 a promoted SUBREG, clear that indication since we now
5841 have to do the proper extension. */
5842 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
5843 && GET_CODE (op0
) == SUBREG
)
5844 SUBREG_PROMOTED_VAR_P (op0
) = 0;
5849 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
5850 if (GET_MODE (op0
) == mode
)
5853 /* If OP0 is a constant, just convert it into the proper mode. */
5854 if (CONSTANT_P (op0
))
5856 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
5857 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5859 if (modifier
== EXPAND_INITIALIZER
)
5860 return gen_rtx (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
5864 convert_to_mode (mode
, op0
,
5865 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5867 convert_move (target
, op0
,
5868 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5872 /* We come here from MINUS_EXPR when the second operand is a
5875 this_optab
= add_optab
;
5877 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5878 something else, make sure we add the register to the constant and
5879 then to the other thing. This case can occur during strength
5880 reduction and doing it this way will produce better code if the
5881 frame pointer or argument pointer is eliminated.
5883 fold-const.c will ensure that the constant is always in the inner
5884 PLUS_EXPR, so the only case we need to do anything about is if
5885 sp, ap, or fp is our second argument, in which case we must swap
5886 the innermost first argument and our second argument. */
5888 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5889 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
5890 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
5891 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
5892 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
5893 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
5895 tree t
= TREE_OPERAND (exp
, 1);
5897 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5898 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
5901 /* If the result is to be ptr_mode and we are adding an integer to
5902 something, we might be forming a constant. So try to use
5903 plus_constant. If it produces a sum and we can't accept it,
5904 use force_operand. This allows P = &ARR[const] to generate
5905 efficient code on machines where a SYMBOL_REF is not a valid
5908 If this is an EXPAND_SUM call, always return the sum. */
5909 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
5910 || mode
== ptr_mode
)
5912 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
5913 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5914 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
5916 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
5918 op1
= plus_constant (op1
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)));
5919 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5920 op1
= force_operand (op1
, target
);
5924 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
5925 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
5926 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
5928 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
5930 if (! CONSTANT_P (op0
))
5932 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5933 VOIDmode
, modifier
);
5934 /* Don't go to both_summands if modifier
5935 says it's not right to return a PLUS. */
5936 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5940 op0
= plus_constant (op0
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)));
5941 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5942 op0
= force_operand (op0
, target
);
5947 /* No sense saving up arithmetic to be done
5948 if it's all in the wrong mode to form part of an address.
5949 And force_operand won't know whether to sign-extend or
5951 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5952 || mode
!= ptr_mode
)
5955 preexpand_calls (exp
);
5956 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5959 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
5960 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
5963 /* Make sure any term that's a sum with a constant comes last. */
5964 if (GET_CODE (op0
) == PLUS
5965 && CONSTANT_P (XEXP (op0
, 1)))
5971 /* If adding to a sum including a constant,
5972 associate it to put the constant outside. */
5973 if (GET_CODE (op1
) == PLUS
5974 && CONSTANT_P (XEXP (op1
, 1)))
5976 rtx constant_term
= const0_rtx
;
5978 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
5981 /* Ensure that MULT comes first if there is one. */
5982 else if (GET_CODE (op0
) == MULT
)
5983 op0
= gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
5985 op0
= gen_rtx (PLUS
, mode
, XEXP (op1
, 0), op0
);
5987 /* Let's also eliminate constants from op0 if possible. */
5988 op0
= eliminate_constant_term (op0
, &constant_term
);
5990 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5991 their sum should be a constant. Form it into OP1, since the
5992 result we want will then be OP0 + OP1. */
5994 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
5999 op1
= gen_rtx (PLUS
, mode
, constant_term
, XEXP (op1
, 1));
6002 /* Put a constant term last and put a multiplication first. */
6003 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
6004 temp
= op1
, op1
= op0
, op0
= temp
;
6006 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
6007 return temp
? temp
: gen_rtx (PLUS
, mode
, op0
, op1
);
6010 /* For initializers, we are allowed to return a MINUS of two
6011 symbolic constants. Here we handle all cases when both operands
6013 /* Handle difference of two symbolic constants,
6014 for the sake of an initializer. */
6015 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6016 && really_constant_p (TREE_OPERAND (exp
, 0))
6017 && really_constant_p (TREE_OPERAND (exp
, 1)))
6019 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
6020 VOIDmode
, modifier
);
6021 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6022 VOIDmode
, modifier
);
6024 /* If the last operand is a CONST_INT, use plus_constant of
6025 the negated constant. Else make the MINUS. */
6026 if (GET_CODE (op1
) == CONST_INT
)
6027 return plus_constant (op0
, - INTVAL (op1
));
6029 return gen_rtx (MINUS
, mode
, op0
, op1
);
6031 /* Convert A - const to A + (-const). */
6032 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6034 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
6035 TREE_OPERAND (exp
, 1)));
6037 /* Deal with the case where we can't negate the constant
6039 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
6041 tree newtype
= signed_type (type
);
6042 tree newop0
= convert (newtype
, TREE_OPERAND (exp
, 0));
6043 tree newop1
= convert (newtype
, TREE_OPERAND (exp
, 1));
6044 tree newneg
= fold (build1 (NEGATE_EXPR
, newtype
, newop1
));
6046 if (! TREE_OVERFLOW (newneg
))
6047 return expand_expr (convert (type
,
6048 build (PLUS_EXPR
, newtype
,
6050 target
, tmode
, modifier
);
6054 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
6058 this_optab
= sub_optab
;
6062 preexpand_calls (exp
);
6063 /* If first operand is constant, swap them.
6064 Thus the following special case checks need only
6065 check the second operand. */
6066 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
6068 register tree t1
= TREE_OPERAND (exp
, 0);
6069 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
6070 TREE_OPERAND (exp
, 1) = t1
;
6073 /* Attempt to return something suitable for generating an
6074 indexed address, for machines that support that. */
6076 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
6077 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6078 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
6080 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, EXPAND_SUM
);
6082 /* Apply distributive law if OP0 is x+c. */
6083 if (GET_CODE (op0
) == PLUS
6084 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
6085 return gen_rtx (PLUS
, mode
,
6086 gen_rtx (MULT
, mode
, XEXP (op0
, 0),
6087 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
6088 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
6089 * INTVAL (XEXP (op0
, 1))));
6091 if (GET_CODE (op0
) != REG
)
6092 op0
= force_operand (op0
, NULL_RTX
);
6093 if (GET_CODE (op0
) != REG
)
6094 op0
= copy_to_mode_reg (mode
, op0
);
6096 return gen_rtx (MULT
, mode
, op0
,
6097 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
6100 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6103 /* Check for multiplying things that have been extended
6104 from a narrower type. If this machine supports multiplying
6105 in that narrower type with a result in the desired type,
6106 do it that way, and avoid the explicit type-conversion. */
6107 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
6108 && TREE_CODE (type
) == INTEGER_TYPE
6109 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6110 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6111 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6112 && int_fits_type_p (TREE_OPERAND (exp
, 1),
6113 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6114 /* Don't use a widening multiply if a shift will do. */
6115 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
6116 > HOST_BITS_PER_WIDE_INT
)
6117 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
6119 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
6120 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6122 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
6123 /* If both operands are extended, they must either both
6124 be zero-extended or both be sign-extended. */
6125 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6127 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
6129 enum machine_mode innermode
6130 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
6131 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6132 ? smul_widen_optab
: umul_widen_optab
);
6133 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6134 ? umul_widen_optab
: smul_widen_optab
);
6135 if (mode
== GET_MODE_WIDER_MODE (innermode
))
6137 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
6139 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6140 NULL_RTX
, VOIDmode
, 0);
6141 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6142 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6145 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6146 NULL_RTX
, VOIDmode
, 0);
6149 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
6150 && innermode
== word_mode
)
6153 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6154 NULL_RTX
, VOIDmode
, 0);
6155 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6156 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6159 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6160 NULL_RTX
, VOIDmode
, 0);
6161 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
6162 unsignedp
, OPTAB_LIB_WIDEN
);
6163 htem
= expand_mult_highpart_adjust (innermode
,
6164 gen_highpart (innermode
, temp
),
6166 gen_highpart (innermode
, temp
),
6168 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
6173 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6174 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6175 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
6177 case TRUNC_DIV_EXPR
:
6178 case FLOOR_DIV_EXPR
:
6180 case ROUND_DIV_EXPR
:
6181 case EXACT_DIV_EXPR
:
6182 preexpand_calls (exp
);
6183 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6185 /* Possible optimization: compute the dividend with EXPAND_SUM
6186 then if the divisor is constant can optimize the case
6187 where some terms of the dividend have coeffs divisible by it. */
6188 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6189 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6190 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
6193 this_optab
= flodiv_optab
;
6196 case TRUNC_MOD_EXPR
:
6197 case FLOOR_MOD_EXPR
:
6199 case ROUND_MOD_EXPR
:
6200 preexpand_calls (exp
);
6201 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6203 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6204 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6205 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
6207 case FIX_ROUND_EXPR
:
6208 case FIX_FLOOR_EXPR
:
6210 abort (); /* Not used for C. */
6212 case FIX_TRUNC_EXPR
:
6213 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6215 target
= gen_reg_rtx (mode
);
6216 expand_fix (target
, op0
, unsignedp
);
6220 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6222 target
= gen_reg_rtx (mode
);
6223 /* expand_float can't figure out what to do if FROM has VOIDmode.
6224 So give it the correct mode. With -O, cse will optimize this. */
6225 if (GET_MODE (op0
) == VOIDmode
)
6226 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6228 expand_float (target
, op0
,
6229 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
6233 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6234 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
6240 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6242 /* Handle complex values specially. */
6243 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
6244 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
6245 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
6247 /* Unsigned abs is simply the operand. Testing here means we don't
6248 risk generating incorrect code below. */
6249 if (TREE_UNSIGNED (type
))
6252 return expand_abs (mode
, op0
, target
, unsignedp
,
6253 safe_from_p (target
, TREE_OPERAND (exp
, 0)));
6257 target
= original_target
;
6258 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1))
6259 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
6260 || GET_MODE (target
) != mode
6261 || (GET_CODE (target
) == REG
6262 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
6263 target
= gen_reg_rtx (mode
);
6264 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6265 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6267 /* First try to do it with a special MIN or MAX instruction.
6268 If that does not win, use a conditional jump to select the proper
6270 this_optab
= (TREE_UNSIGNED (type
)
6271 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
6272 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
6274 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
6279 /* At this point, a MEM target is no longer useful; we will get better
6282 if (GET_CODE (target
) == MEM
)
6283 target
= gen_reg_rtx (mode
);
6286 emit_move_insn (target
, op0
);
6288 op0
= gen_label_rtx ();
6290 /* If this mode is an integer too wide to compare properly,
6291 compare word by word. Rely on cse to optimize constant cases. */
6292 if (GET_MODE_CLASS (mode
) == MODE_INT
&& !can_compare_p (mode
))
6294 if (code
== MAX_EXPR
)
6295 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6296 target
, op1
, NULL_RTX
, op0
);
6298 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6299 op1
, target
, NULL_RTX
, op0
);
6300 emit_move_insn (target
, op1
);
6304 if (code
== MAX_EXPR
)
6305 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6306 ? compare_from_rtx (target
, op1
, GEU
, 1, mode
, NULL_RTX
, 0)
6307 : compare_from_rtx (target
, op1
, GE
, 0, mode
, NULL_RTX
, 0));
6309 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6310 ? compare_from_rtx (target
, op1
, LEU
, 1, mode
, NULL_RTX
, 0)
6311 : compare_from_rtx (target
, op1
, LE
, 0, mode
, NULL_RTX
, 0));
6312 if (temp
== const0_rtx
)
6313 emit_move_insn (target
, op1
);
6314 else if (temp
!= const_true_rtx
)
6316 if (bcc_gen_fctn
[(int) GET_CODE (temp
)] != 0)
6317 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (temp
)]) (op0
));
6320 emit_move_insn (target
, op1
);
6327 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6328 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
6334 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6335 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
6340 /* ??? Can optimize bitwise operations with one arg constant.
6341 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6342 and (a bitwise1 b) bitwise2 b (etc)
6343 but that is probably not worth while. */
6345 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6346 boolean values when we want in all cases to compute both of them. In
6347 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6348 as actual zero-or-1 values and then bitwise anding. In cases where
6349 there cannot be any side effects, better code would be made by
6350 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6351 how to recognize those cases. */
6353 case TRUTH_AND_EXPR
:
6355 this_optab
= and_optab
;
6360 this_optab
= ior_optab
;
6363 case TRUTH_XOR_EXPR
:
6365 this_optab
= xor_optab
;
6372 preexpand_calls (exp
);
6373 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6375 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6376 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
6379 /* Could determine the answer when only additive constants differ. Also,
6380 the addition of one can be handled by changing the condition. */
6387 preexpand_calls (exp
);
6388 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
6392 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6393 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
6395 && GET_CODE (original_target
) == REG
6396 && (GET_MODE (original_target
)
6397 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6399 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6402 if (temp
!= original_target
)
6403 temp
= copy_to_reg (temp
);
6405 op1
= gen_label_rtx ();
6406 emit_cmp_insn (temp
, const0_rtx
, EQ
, NULL_RTX
,
6407 GET_MODE (temp
), unsignedp
, 0);
6408 emit_jump_insn (gen_beq (op1
));
6409 emit_move_insn (temp
, const1_rtx
);
6414 /* If no set-flag instruction, must generate a conditional
6415 store into a temporary variable. Drop through
6416 and handle this like && and ||. */
6418 case TRUTH_ANDIF_EXPR
:
6419 case TRUTH_ORIF_EXPR
:
6421 && (target
== 0 || ! safe_from_p (target
, exp
)
6422 /* Make sure we don't have a hard reg (such as function's return
6423 value) live across basic blocks, if not optimizing. */
6424 || (!optimize
&& GET_CODE (target
) == REG
6425 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
6426 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6429 emit_clr_insn (target
);
6431 op1
= gen_label_rtx ();
6432 jumpifnot (exp
, op1
);
6435 emit_0_to_1_insn (target
);
6438 return ignore
? const0_rtx
: target
;
6440 case TRUTH_NOT_EXPR
:
6441 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6442 /* The parser is careful to generate TRUTH_NOT_EXPR
6443 only with operands that are always zero or one. */
6444 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
6445 target
, 1, OPTAB_LIB_WIDEN
);
6451 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6453 return expand_expr (TREE_OPERAND (exp
, 1),
6454 (ignore
? const0_rtx
: target
),
6459 rtx flag
= NULL_RTX
;
6460 tree left_cleanups
= NULL_TREE
;
6461 tree right_cleanups
= NULL_TREE
;
6463 /* Used to save a pointer to the place to put the setting of
6464 the flag that indicates if this side of the conditional was
6465 taken. We backpatch the code, if we find out later that we
6466 have any conditional cleanups that need to be performed. */
6467 rtx dest_right_flag
= NULL_RTX
;
6468 rtx dest_left_flag
= NULL_RTX
;
6470 /* Note that COND_EXPRs whose type is a structure or union
6471 are required to be constructed to contain assignments of
6472 a temporary variable, so that we can evaluate them here
6473 for side effect only. If type is void, we must do likewise. */
6475 /* If an arm of the branch requires a cleanup,
6476 only that cleanup is performed. */
6479 tree binary_op
= 0, unary_op
= 0;
6480 tree old_cleanups
= cleanups_this_call
;
6482 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6483 convert it to our mode, if necessary. */
6484 if (integer_onep (TREE_OPERAND (exp
, 1))
6485 && integer_zerop (TREE_OPERAND (exp
, 2))
6486 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6490 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6495 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
6496 if (GET_MODE (op0
) == mode
)
6500 target
= gen_reg_rtx (mode
);
6501 convert_move (target
, op0
, unsignedp
);
6505 /* Check for X ? A + B : A. If we have this, we can copy
6506 A to the output and conditionally add B. Similarly for unary
6507 operations. Don't do this if X has side-effects because
6508 those side effects might affect A or B and the "?" operation is
6509 a sequence point in ANSI. (We test for side effects later.) */
6511 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
6512 && operand_equal_p (TREE_OPERAND (exp
, 2),
6513 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6514 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
6515 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
6516 && operand_equal_p (TREE_OPERAND (exp
, 1),
6517 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6518 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
6519 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
6520 && operand_equal_p (TREE_OPERAND (exp
, 2),
6521 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6522 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
6523 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
6524 && operand_equal_p (TREE_OPERAND (exp
, 1),
6525 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6526 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
6528 /* If we are not to produce a result, we have no target. Otherwise,
6529 if a target was specified use it; it will not be used as an
6530 intermediate target unless it is safe. If no target, use a
6535 else if (original_target
6536 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0))
6537 || (singleton
&& GET_CODE (original_target
) == REG
6538 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
6539 && original_target
== var_rtx (singleton
)))
6540 && GET_MODE (original_target
) == mode
6541 && ! (GET_CODE (original_target
) == MEM
6542 && MEM_VOLATILE_P (original_target
)))
6543 temp
= original_target
;
6544 else if (TREE_ADDRESSABLE (type
))
6547 temp
= assign_temp (type
, 0, 0, 1);
6549 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6550 operation, do this as A + (X != 0). Similarly for other simple
6551 binary operators. */
6552 if (temp
&& singleton
&& binary_op
6553 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6554 && (TREE_CODE (binary_op
) == PLUS_EXPR
6555 || TREE_CODE (binary_op
) == MINUS_EXPR
6556 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
6557 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
6558 && integer_onep (TREE_OPERAND (binary_op
, 1))
6559 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6562 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
6563 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
6564 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
6567 /* If we had X ? A : A + 1, do this as A + (X == 0).
6569 We have to invert the truth value here and then put it
6570 back later if do_store_flag fails. We cannot simply copy
6571 TREE_OPERAND (exp, 0) to another variable and modify that
6572 because invert_truthvalue can modify the tree pointed to
6574 if (singleton
== TREE_OPERAND (exp
, 1))
6575 TREE_OPERAND (exp
, 0)
6576 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6578 result
= do_store_flag (TREE_OPERAND (exp
, 0),
6579 (safe_from_p (temp
, singleton
)
6581 mode
, BRANCH_COST
<= 1);
6585 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
6586 return expand_binop (mode
, boptab
, op1
, result
, temp
,
6587 unsignedp
, OPTAB_LIB_WIDEN
);
6589 else if (singleton
== TREE_OPERAND (exp
, 1))
6590 TREE_OPERAND (exp
, 0)
6591 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6594 do_pending_stack_adjust ();
6596 op0
= gen_label_rtx ();
6598 flag
= gen_reg_rtx (word_mode
);
6599 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
6603 /* If the target conflicts with the other operand of the
6604 binary op, we can't use it. Also, we can't use the target
6605 if it is a hard register, because evaluating the condition
6606 might clobber it. */
6608 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1)))
6609 || (GET_CODE (temp
) == REG
6610 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
6611 temp
= gen_reg_rtx (mode
);
6612 store_expr (singleton
, temp
, 0);
6615 expand_expr (singleton
,
6616 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6617 dest_left_flag
= get_last_insn ();
6618 if (singleton
== TREE_OPERAND (exp
, 1))
6619 jumpif (TREE_OPERAND (exp
, 0), op0
);
6621 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6623 /* Allows cleanups up to here. */
6624 old_cleanups
= cleanups_this_call
;
6625 if (binary_op
&& temp
== 0)
6626 /* Just touch the other operand. */
6627 expand_expr (TREE_OPERAND (binary_op
, 1),
6628 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6630 store_expr (build (TREE_CODE (binary_op
), type
,
6631 make_tree (type
, temp
),
6632 TREE_OPERAND (binary_op
, 1)),
6635 store_expr (build1 (TREE_CODE (unary_op
), type
,
6636 make_tree (type
, temp
)),
6639 dest_right_flag
= get_last_insn ();
6642 /* This is now done in jump.c and is better done there because it
6643 produces shorter register lifetimes. */
6645 /* Check for both possibilities either constants or variables
6646 in registers (but not the same as the target!). If so, can
6647 save branches by assigning one, branching, and assigning the
6649 else if (temp
&& GET_MODE (temp
) != BLKmode
6650 && (TREE_CONSTANT (TREE_OPERAND (exp
, 1))
6651 || ((TREE_CODE (TREE_OPERAND (exp
, 1)) == PARM_DECL
6652 || TREE_CODE (TREE_OPERAND (exp
, 1)) == VAR_DECL
)
6653 && DECL_RTL (TREE_OPERAND (exp
, 1))
6654 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 1))) == REG
6655 && DECL_RTL (TREE_OPERAND (exp
, 1)) != temp
))
6656 && (TREE_CONSTANT (TREE_OPERAND (exp
, 2))
6657 || ((TREE_CODE (TREE_OPERAND (exp
, 2)) == PARM_DECL
6658 || TREE_CODE (TREE_OPERAND (exp
, 2)) == VAR_DECL
)
6659 && DECL_RTL (TREE_OPERAND (exp
, 2))
6660 && GET_CODE (DECL_RTL (TREE_OPERAND (exp
, 2))) == REG
6661 && DECL_RTL (TREE_OPERAND (exp
, 2)) != temp
)))
6663 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6664 temp
= gen_reg_rtx (mode
);
6665 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6666 dest_left_flag
= get_last_insn ();
6667 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6669 /* Allows cleanups up to here. */
6670 old_cleanups
= cleanups_this_call
;
6671 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6673 dest_right_flag
= get_last_insn ();
6676 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6677 comparison operator. If we have one of these cases, set the
6678 output to A, branch on A (cse will merge these two references),
6679 then set the output to FOO. */
6681 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6682 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6683 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6684 TREE_OPERAND (exp
, 1), 0)
6685 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6686 && safe_from_p (temp
, TREE_OPERAND (exp
, 2)))
6688 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6689 temp
= gen_reg_rtx (mode
);
6690 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6691 dest_left_flag
= get_last_insn ();
6692 jumpif (TREE_OPERAND (exp
, 0), op0
);
6694 /* Allows cleanups up to here. */
6695 old_cleanups
= cleanups_this_call
;
6696 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6698 dest_right_flag
= get_last_insn ();
6701 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6702 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6703 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6704 TREE_OPERAND (exp
, 2), 0)
6705 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6706 && safe_from_p (temp
, TREE_OPERAND (exp
, 1)))
6708 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6709 temp
= gen_reg_rtx (mode
);
6710 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6711 dest_left_flag
= get_last_insn ();
6712 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6714 /* Allows cleanups up to here. */
6715 old_cleanups
= cleanups_this_call
;
6716 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6718 dest_right_flag
= get_last_insn ();
6722 op1
= gen_label_rtx ();
6723 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6725 /* Allows cleanups up to here. */
6726 old_cleanups
= cleanups_this_call
;
6728 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6730 expand_expr (TREE_OPERAND (exp
, 1),
6731 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6732 dest_left_flag
= get_last_insn ();
6734 /* Handle conditional cleanups, if any. */
6735 left_cleanups
= defer_cleanups_to (old_cleanups
);
6738 emit_jump_insn (gen_jump (op1
));
6742 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6744 expand_expr (TREE_OPERAND (exp
, 2),
6745 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6746 dest_right_flag
= get_last_insn ();
6749 /* Handle conditional cleanups, if any. */
6750 right_cleanups
= defer_cleanups_to (old_cleanups
);
6756 /* Add back in, any conditional cleanups. */
6757 if (left_cleanups
|| right_cleanups
)
6763 /* Now that we know that a flag is needed, go back and add in the
6764 setting of the flag. */
6766 /* Do the left side flag. */
6767 last
= get_last_insn ();
6768 /* Flag left cleanups as needed. */
6769 emit_move_insn (flag
, const1_rtx
);
6770 /* ??? deprecated, use sequences instead. */
6771 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_left_flag
);
6773 /* Do the right side flag. */
6774 last
= get_last_insn ();
6775 /* Flag left cleanups as needed. */
6776 emit_move_insn (flag
, const0_rtx
);
6777 /* ??? deprecated, use sequences instead. */
6778 reorder_insns (NEXT_INSN (last
), get_last_insn (), dest_right_flag
);
6780 /* All cleanups must be on the function_obstack. */
6781 push_obstacks_nochange ();
6782 resume_temporary_allocation ();
6784 /* convert flag, which is an rtx, into a tree. */
6785 cond
= make_node (RTL_EXPR
);
6786 TREE_TYPE (cond
) = integer_type_node
;
6787 RTL_EXPR_RTL (cond
) = flag
;
6788 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
6789 cond
= save_expr (cond
);
6791 if (! left_cleanups
)
6792 left_cleanups
= integer_zero_node
;
6793 if (! right_cleanups
)
6794 right_cleanups
= integer_zero_node
;
6795 new_cleanups
= build (COND_EXPR
, void_type_node
,
6796 truthvalue_conversion (cond
),
6797 left_cleanups
, right_cleanups
);
6798 new_cleanups
= fold (new_cleanups
);
6802 /* Now add in the conditionalized cleanups. */
6804 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
6805 expand_eh_region_start ();
6812 /* Something needs to be initialized, but we didn't know
6813 where that thing was when building the tree. For example,
6814 it could be the return value of a function, or a parameter
6815 to a function which lays down in the stack, or a temporary
6816 variable which must be passed by reference.
6818 We guarantee that the expression will either be constructed
6819 or copied into our original target. */
6821 tree slot
= TREE_OPERAND (exp
, 0);
6822 tree cleanups
= NULL_TREE
;
6826 if (TREE_CODE (slot
) != VAR_DECL
)
6830 target
= original_target
;
6834 if (DECL_RTL (slot
) != 0)
6836 target
= DECL_RTL (slot
);
6837 /* If we have already expanded the slot, so don't do
6839 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6844 target
= assign_temp (type
, 2, 1, 1);
6845 /* All temp slots at this level must not conflict. */
6846 preserve_temp_slots (target
);
6847 DECL_RTL (slot
) = target
;
6849 /* Since SLOT is not known to the called function
6850 to belong to its stack frame, we must build an explicit
6851 cleanup. This case occurs when we must build up a reference
6852 to pass the reference as an argument. In this case,
6853 it is very likely that such a reference need not be
6856 if (TREE_OPERAND (exp
, 2) == 0)
6857 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
6858 cleanups
= TREE_OPERAND (exp
, 2);
6863 /* This case does occur, when expanding a parameter which
6864 needs to be constructed on the stack. The target
6865 is the actual stack address that we want to initialize.
6866 The function we call will perform the cleanup in this case. */
6868 /* If we have already assigned it space, use that space,
6869 not target that we were passed in, as our target
6870 parameter is only a hint. */
6871 if (DECL_RTL (slot
) != 0)
6873 target
= DECL_RTL (slot
);
6874 /* If we have already expanded the slot, so don't do
6876 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6880 DECL_RTL (slot
) = target
;
6883 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
6884 /* Mark it as expanded. */
6885 TREE_OPERAND (exp
, 1) = NULL_TREE
;
6887 store_expr (exp1
, target
, 0);
6891 cleanups_this_call
= tree_cons (NULL_TREE
,
6893 cleanups_this_call
);
6894 expand_eh_region_start ();
6902 tree lhs
= TREE_OPERAND (exp
, 0);
6903 tree rhs
= TREE_OPERAND (exp
, 1);
6904 tree noncopied_parts
= 0;
6905 tree lhs_type
= TREE_TYPE (lhs
);
6907 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
6908 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
6909 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
6910 TYPE_NONCOPIED_PARTS (lhs_type
));
6911 while (noncopied_parts
!= 0)
6913 expand_assignment (TREE_VALUE (noncopied_parts
),
6914 TREE_PURPOSE (noncopied_parts
), 0, 0);
6915 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
6922 /* If lhs is complex, expand calls in rhs before computing it.
6923 That's so we don't compute a pointer and save it over a call.
6924 If lhs is simple, compute it first so we can give it as a
6925 target if the rhs is just a call. This avoids an extra temp and copy
6926 and that prevents a partial-subsumption which makes bad code.
6927 Actually we could treat component_ref's of vars like vars. */
6929 tree lhs
= TREE_OPERAND (exp
, 0);
6930 tree rhs
= TREE_OPERAND (exp
, 1);
6931 tree noncopied_parts
= 0;
6932 tree lhs_type
= TREE_TYPE (lhs
);
6936 if (TREE_CODE (lhs
) != VAR_DECL
6937 && TREE_CODE (lhs
) != RESULT_DECL
6938 && TREE_CODE (lhs
) != PARM_DECL
)
6939 preexpand_calls (exp
);
6941 /* Check for |= or &= of a bitfield of size one into another bitfield
6942 of size 1. In this case, (unless we need the result of the
6943 assignment) we can do this more efficiently with a
6944 test followed by an assignment, if necessary.
6946 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6947 things change so we do, this code should be enhanced to
6950 && TREE_CODE (lhs
) == COMPONENT_REF
6951 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
6952 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
6953 && TREE_OPERAND (rhs
, 0) == lhs
6954 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
6955 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
6956 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
6958 rtx label
= gen_label_rtx ();
6960 do_jump (TREE_OPERAND (rhs
, 1),
6961 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
6962 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
6963 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
6964 (TREE_CODE (rhs
) == BIT_IOR_EXPR
6966 : integer_zero_node
)),
6968 do_pending_stack_adjust ();
6973 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
6974 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
6975 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
6976 TYPE_NONCOPIED_PARTS (lhs_type
));
6978 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
6979 while (noncopied_parts
!= 0)
6981 expand_assignment (TREE_PURPOSE (noncopied_parts
),
6982 TREE_VALUE (noncopied_parts
), 0, 0);
6983 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
6988 case PREINCREMENT_EXPR
:
6989 case PREDECREMENT_EXPR
:
6990 return expand_increment (exp
, 0, ignore
);
6992 case POSTINCREMENT_EXPR
:
6993 case POSTDECREMENT_EXPR
:
6994 /* Faster to treat as pre-increment if result is not used. */
6995 return expand_increment (exp
, ! ignore
, ignore
);
6998 /* If nonzero, TEMP will be set to the address of something that might
6999 be a MEM corresponding to a stack slot. */
7002 /* Are we taking the address of a nested function? */
7003 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
7004 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
7005 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0)))
7007 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
7008 op0
= force_operand (op0
, target
);
7010 /* If we are taking the address of something erroneous, just
7012 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
7016 /* We make sure to pass const0_rtx down if we came in with
7017 ignore set, to avoid doing the cleanups twice for something. */
7018 op0
= expand_expr (TREE_OPERAND (exp
, 0),
7019 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
7020 (modifier
== EXPAND_INITIALIZER
7021 ? modifier
: EXPAND_CONST_ADDRESS
));
7023 /* If we are going to ignore the result, OP0 will have been set
7024 to const0_rtx, so just return it. Don't get confused and
7025 think we are taking the address of the constant. */
7029 op0
= protect_from_queue (op0
, 0);
7031 /* We would like the object in memory. If it is a constant,
7032 we can have it be statically allocated into memory. For
7033 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7034 memory and store the value into it. */
7036 if (CONSTANT_P (op0
))
7037 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7039 else if (GET_CODE (op0
) == MEM
)
7041 mark_temp_addr_taken (op0
);
7042 temp
= XEXP (op0
, 0);
7045 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7046 || GET_CODE (op0
) == CONCAT
)
7048 /* If this object is in a register, it must be not
7050 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7051 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
7053 mark_temp_addr_taken (memloc
);
7054 emit_move_insn (memloc
, op0
);
7058 if (GET_CODE (op0
) != MEM
)
7061 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7063 temp
= XEXP (op0
, 0);
7064 #ifdef POINTERS_EXTEND_UNSIGNED
7065 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
7066 && mode
== ptr_mode
)
7067 temp
= convert_memory_address (ptr_mode
, temp
);
7072 op0
= force_operand (XEXP (op0
, 0), target
);
7075 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
7076 op0
= force_reg (Pmode
, op0
);
7078 if (GET_CODE (op0
) == REG
7079 && ! REG_USERVAR_P (op0
))
7080 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
7082 /* If we might have had a temp slot, add an equivalent address
7085 update_temp_slot_address (temp
, op0
);
7087 #ifdef POINTERS_EXTEND_UNSIGNED
7088 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
7089 && mode
== ptr_mode
)
7090 op0
= convert_memory_address (ptr_mode
, op0
);
7095 case ENTRY_VALUE_EXPR
:
7098 /* COMPLEX type for Extended Pascal & Fortran */
7101 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7104 /* Get the rtx code of the operands. */
7105 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7106 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
7109 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7113 /* Move the real (op0) and imaginary (op1) parts to their location. */
7114 emit_move_insn (gen_realpart (mode
, target
), op0
);
7115 emit_move_insn (gen_imagpart (mode
, target
), op1
);
7117 insns
= get_insns ();
7120 /* Complex construction should appear as a single unit. */
7121 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7122 each with a separate pseudo as destination.
7123 It's not correct for flow to treat them as a unit. */
7124 if (GET_CODE (target
) != CONCAT
)
7125 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
7133 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7134 return gen_realpart (mode
, op0
);
7137 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7138 return gen_imagpart (mode
, op0
);
7142 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7146 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7149 target
= gen_reg_rtx (mode
);
7153 /* Store the realpart and the negated imagpart to target. */
7154 emit_move_insn (gen_realpart (partmode
, target
),
7155 gen_realpart (partmode
, op0
));
7157 imag_t
= gen_imagpart (partmode
, target
);
7158 temp
= expand_unop (partmode
, neg_optab
,
7159 gen_imagpart (partmode
, op0
), imag_t
, 0);
7161 emit_move_insn (imag_t
, temp
);
7163 insns
= get_insns ();
7166 /* Conjugate should appear as a single unit
7167 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7168 each with a separate pseudo as destination.
7169 It's not correct for flow to treat them as a unit. */
7170 if (GET_CODE (target
) != CONCAT
)
7171 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
7179 op0
= CONST0_RTX (tmode
);
7185 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
7188 /* Here to do an ordinary binary operator, generating an instruction
7189 from the optab already placed in `this_optab'. */
7191 preexpand_calls (exp
);
7192 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
7194 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7195 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7197 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
7198 unsignedp
, OPTAB_LIB_WIDEN
);
7205 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7208 bc_expand_expr (exp
)
7211 enum tree_code code
;
7214 struct binary_operator
*binoptab
;
7215 struct unary_operator
*unoptab
;
7216 struct increment_operator
*incroptab
;
7217 struct bc_label
*lab
, *lab1
;
7218 enum bytecode_opcode opcode
;
7221 code
= TREE_CODE (exp
);
7227 if (DECL_RTL (exp
) == 0)
7229 error_with_decl (exp
, "prior parameter's size depends on `%s'");
7233 bc_load_parmaddr (DECL_RTL (exp
));
7234 bc_load_memory (TREE_TYPE (exp
), exp
);
7240 if (DECL_RTL (exp
) == 0)
7244 if (BYTECODE_LABEL (DECL_RTL (exp
)))
7245 bc_load_externaddr (DECL_RTL (exp
));
7247 bc_load_localaddr (DECL_RTL (exp
));
7249 if (TREE_PUBLIC (exp
))
7250 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
7251 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
7253 bc_load_localaddr (DECL_RTL (exp
));
7255 bc_load_memory (TREE_TYPE (exp
), exp
);
7260 #ifdef DEBUG_PRINT_CODE
7261 fprintf (stderr
, " [%x]\n", TREE_INT_CST_LOW (exp
));
7263 bc_emit_instruction (mode_to_const_map
[(int) (DECL_BIT_FIELD (exp
)
7265 : TYPE_MODE (TREE_TYPE (exp
)))],
7266 (HOST_WIDE_INT
) TREE_INT_CST_LOW (exp
));
7272 #ifdef DEBUG_PRINT_CODE
7273 fprintf (stderr
, " [%g]\n", (double) TREE_INT_CST_LOW (exp
));
7275 /* FIX THIS: find a better way to pass real_cst's. -bson */
7276 bc_emit_instruction (mode_to_const_map
[TYPE_MODE (TREE_TYPE (exp
))],
7277 (double) TREE_REAL_CST (exp
));
7286 /* We build a call description vector describing the type of
7287 the return value and of the arguments; this call vector,
7288 together with a pointer to a location for the return value
7289 and the base of the argument list, is passed to the low
7290 level machine dependent call subroutine, which is responsible
7291 for putting the arguments wherever real functions expect
7292 them, as well as getting the return value back. */
7294 tree calldesc
= 0, arg
;
7298 /* Push the evaluated args on the evaluation stack in reverse
7299 order. Also make an entry for each arg in the calldesc
7300 vector while we're at it. */
7302 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7304 for (arg
= TREE_OPERAND (exp
, 1); arg
; arg
= TREE_CHAIN (arg
))
7307 bc_expand_expr (TREE_VALUE (arg
));
7309 calldesc
= tree_cons ((tree
) 0,
7310 size_in_bytes (TREE_TYPE (TREE_VALUE (arg
))),
7312 calldesc
= tree_cons ((tree
) 0,
7313 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg
))),
7317 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7319 /* Allocate a location for the return value and push its
7320 address on the evaluation stack. Also make an entry
7321 at the front of the calldesc for the return value type. */
7323 type
= TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7324 retval
= bc_allocate_local (int_size_in_bytes (type
), TYPE_ALIGN (type
));
7325 bc_load_localaddr (retval
);
7327 calldesc
= tree_cons ((tree
) 0, size_in_bytes (type
), calldesc
);
7328 calldesc
= tree_cons ((tree
) 0, bc_runtime_type_code (type
), calldesc
);
7330 /* Prepend the argument count. */
7331 calldesc
= tree_cons ((tree
) 0,
7332 build_int_2 (nargs
, 0),
7335 /* Push the address of the call description vector on the stack. */
7336 calldesc
= build_nt (CONSTRUCTOR
, (tree
) 0, calldesc
);
7337 TREE_TYPE (calldesc
) = build_array_type (integer_type_node
,
7338 build_index_type (build_int_2 (nargs
* 2, 0)));
7339 r
= output_constant_def (calldesc
);
7340 bc_load_externaddr (r
);
7342 /* Push the address of the function to be called. */
7343 bc_expand_expr (TREE_OPERAND (exp
, 0));
7345 /* Call the function, popping its address and the calldesc vector
7346 address off the evaluation stack in the process. */
7347 bc_emit_instruction (call
);
7349 /* Pop the arguments off the stack. */
7350 bc_adjust_stack (nargs
);
7352 /* Load the return value onto the stack. */
7353 bc_load_localaddr (retval
);
7354 bc_load_memory (type
, TREE_OPERAND (exp
, 0));
7360 if (!SAVE_EXPR_RTL (exp
))
7362 /* First time around: copy to local variable */
7363 SAVE_EXPR_RTL (exp
) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp
)),
7364 TYPE_ALIGN (TREE_TYPE(exp
)));
7365 bc_expand_expr (TREE_OPERAND (exp
, 0));
7366 bc_emit_instruction (duplicate
);
7368 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7369 bc_store_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7373 /* Consecutive reference: use saved copy */
7374 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7375 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7380 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7381 how are they handled instead? */
7384 TREE_USED (exp
) = 1;
7385 bc_expand_expr (STMT_BODY (exp
));
7392 bc_expand_expr (TREE_OPERAND (exp
, 0));
7393 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)), TREE_TYPE (exp
));
7398 expand_assignment (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1), 0, 0);
7403 bc_expand_address (TREE_OPERAND (exp
, 0));
7408 bc_expand_expr (TREE_OPERAND (exp
, 0));
7409 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7414 bc_expand_expr (bc_canonicalize_array_ref (exp
));
7419 bc_expand_component_address (exp
);
7421 /* If we have a bitfield, generate a proper load */
7422 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp
, 1)), TREE_OPERAND (exp
, 1));
7427 bc_expand_expr (TREE_OPERAND (exp
, 0));
7428 bc_emit_instruction (drop
);
7429 bc_expand_expr (TREE_OPERAND (exp
, 1));
7434 bc_expand_expr (TREE_OPERAND (exp
, 0));
7435 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7436 lab
= bc_get_bytecode_label ();
7437 bc_emit_bytecode (xjumpifnot
);
7438 bc_emit_bytecode_labelref (lab
);
7440 #ifdef DEBUG_PRINT_CODE
7441 fputc ('\n', stderr
);
7443 bc_expand_expr (TREE_OPERAND (exp
, 1));
7444 lab1
= bc_get_bytecode_label ();
7445 bc_emit_bytecode (jump
);
7446 bc_emit_bytecode_labelref (lab1
);
7448 #ifdef DEBUG_PRINT_CODE
7449 fputc ('\n', stderr
);
7452 bc_emit_bytecode_labeldef (lab
);
7453 bc_expand_expr (TREE_OPERAND (exp
, 2));
7454 bc_emit_bytecode_labeldef (lab1
);
7457 case TRUTH_ANDIF_EXPR
:
7459 opcode
= xjumpifnot
;
7462 case TRUTH_ORIF_EXPR
:
7469 binoptab
= optab_plus_expr
;
7474 binoptab
= optab_minus_expr
;
7479 binoptab
= optab_mult_expr
;
7482 case TRUNC_DIV_EXPR
:
7483 case FLOOR_DIV_EXPR
:
7485 case ROUND_DIV_EXPR
:
7486 case EXACT_DIV_EXPR
:
7488 binoptab
= optab_trunc_div_expr
;
7491 case TRUNC_MOD_EXPR
:
7492 case FLOOR_MOD_EXPR
:
7494 case ROUND_MOD_EXPR
:
7496 binoptab
= optab_trunc_mod_expr
;
7499 case FIX_ROUND_EXPR
:
7500 case FIX_FLOOR_EXPR
:
7502 abort (); /* Not used for C. */
7504 case FIX_TRUNC_EXPR
:
7511 abort (); /* FIXME */
7515 binoptab
= optab_rdiv_expr
;
7520 binoptab
= optab_bit_and_expr
;
7525 binoptab
= optab_bit_ior_expr
;
7530 binoptab
= optab_bit_xor_expr
;
7535 binoptab
= optab_lshift_expr
;
7540 binoptab
= optab_rshift_expr
;
7543 case TRUTH_AND_EXPR
:
7545 binoptab
= optab_truth_and_expr
;
7550 binoptab
= optab_truth_or_expr
;
7555 binoptab
= optab_lt_expr
;
7560 binoptab
= optab_le_expr
;
7565 binoptab
= optab_ge_expr
;
7570 binoptab
= optab_gt_expr
;
7575 binoptab
= optab_eq_expr
;
7580 binoptab
= optab_ne_expr
;
7585 unoptab
= optab_negate_expr
;
7590 unoptab
= optab_bit_not_expr
;
7593 case TRUTH_NOT_EXPR
:
7595 unoptab
= optab_truth_not_expr
;
7598 case PREDECREMENT_EXPR
:
7600 incroptab
= optab_predecrement_expr
;
7603 case PREINCREMENT_EXPR
:
7605 incroptab
= optab_preincrement_expr
;
7608 case POSTDECREMENT_EXPR
:
7610 incroptab
= optab_postdecrement_expr
;
7613 case POSTINCREMENT_EXPR
:
7615 incroptab
= optab_postincrement_expr
;
7620 bc_expand_constructor (exp
);
7630 tree vars
= TREE_OPERAND (exp
, 0);
7631 int vars_need_expansion
= 0;
7633 /* Need to open a binding contour here because
7634 if there are any cleanups they most be contained here. */
7635 expand_start_bindings (0);
7637 /* Mark the corresponding BLOCK for output. */
7638 if (TREE_OPERAND (exp
, 2) != 0)
7639 TREE_USED (TREE_OPERAND (exp
, 2)) = 1;
7641 /* If VARS have not yet been expanded, expand them now. */
7644 if (DECL_RTL (vars
) == 0)
7646 vars_need_expansion
= 1;
7649 expand_decl_init (vars
);
7650 vars
= TREE_CHAIN (vars
);
7653 bc_expand_expr (TREE_OPERAND (exp
, 1));
7655 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
7665 bc_expand_binary_operation (binoptab
, TREE_TYPE (exp
),
7666 TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1));
7672 bc_expand_unary_operation (unoptab
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7678 bc_expand_expr (TREE_OPERAND (exp
, 0));
7679 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7680 lab
= bc_get_bytecode_label ();
7682 bc_emit_instruction (duplicate
);
7683 bc_emit_bytecode (opcode
);
7684 bc_emit_bytecode_labelref (lab
);
7686 #ifdef DEBUG_PRINT_CODE
7687 fputc ('\n', stderr
);
7690 bc_emit_instruction (drop
);
7692 bc_expand_expr (TREE_OPERAND (exp
, 1));
7693 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7694 bc_emit_bytecode_labeldef (lab
);
7700 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7702 /* Push the quantum. */
7703 bc_expand_expr (TREE_OPERAND (exp
, 1));
7705 /* Convert it to the lvalue's type. */
7706 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)), type
);
7708 /* Push the address of the lvalue */
7709 bc_expand_expr (build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
), TREE_OPERAND (exp
, 0)));
7711 /* Perform actual increment */
7712 bc_expand_increment (incroptab
, type
);
7716 /* Return the alignment in bits of EXP, a pointer valued expression.
7717 But don't return more than MAX_ALIGN no matter what.
7718 The alignment returned is, by default, the alignment of the thing that
7719 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7721 Otherwise, look at the expression to see if we can do better, i.e., if the
7722 expression is actually pointing at an object whose alignment is tighter. */
7725 get_pointer_alignment (exp
, max_align
)
7729 unsigned align
, inner
;
7731 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
7734 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7735 align
= MIN (align
, max_align
);
7739 switch (TREE_CODE (exp
))
7743 case NON_LVALUE_EXPR
:
7744 exp
= TREE_OPERAND (exp
, 0);
7745 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
7747 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7748 align
= MIN (inner
, max_align
);
7752 /* If sum of pointer + int, restrict our maximum alignment to that
7753 imposed by the integer. If not, we can't do any better than
7755 if (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
)
7758 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
)
7763 exp
= TREE_OPERAND (exp
, 0);
7767 /* See what we are pointing at and look at its alignment. */
7768 exp
= TREE_OPERAND (exp
, 0);
7769 if (TREE_CODE (exp
) == FUNCTION_DECL
)
7770 align
= FUNCTION_BOUNDARY
;
7771 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
7772 align
= DECL_ALIGN (exp
);
7773 #ifdef CONSTANT_ALIGNMENT
7774 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c')
7775 align
= CONSTANT_ALIGNMENT (exp
, align
);
7777 return MIN (align
, max_align
);
7785 /* Return the tree node and offset if a given argument corresponds to
7786 a string constant. */
7789 string_constant (arg
, ptr_offset
)
7795 if (TREE_CODE (arg
) == ADDR_EXPR
7796 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
7798 *ptr_offset
= integer_zero_node
;
7799 return TREE_OPERAND (arg
, 0);
7801 else if (TREE_CODE (arg
) == PLUS_EXPR
)
7803 tree arg0
= TREE_OPERAND (arg
, 0);
7804 tree arg1
= TREE_OPERAND (arg
, 1);
7809 if (TREE_CODE (arg0
) == ADDR_EXPR
7810 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
7813 return TREE_OPERAND (arg0
, 0);
7815 else if (TREE_CODE (arg1
) == ADDR_EXPR
7816 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
7819 return TREE_OPERAND (arg1
, 0);
7826 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7827 way, because it could contain a zero byte in the middle.
7828 TREE_STRING_LENGTH is the size of the character array, not the string.
7830 Unfortunately, string_constant can't access the values of const char
7831 arrays with initializers, so neither can we do so here. */
7841 src
= string_constant (src
, &offset_node
);
7844 max
= TREE_STRING_LENGTH (src
);
7845 ptr
= TREE_STRING_POINTER (src
);
7846 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
7848 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7849 compute the offset to the following null if we don't know where to
7850 start searching for it. */
7852 for (i
= 0; i
< max
; i
++)
7855 /* We don't know the starting offset, but we do know that the string
7856 has no internal zero bytes. We can assume that the offset falls
7857 within the bounds of the string; otherwise, the programmer deserves
7858 what he gets. Subtract the offset from the length of the string,
7860 /* This would perhaps not be valid if we were dealing with named
7861 arrays in addition to literal string constants. */
7862 return size_binop (MINUS_EXPR
, size_int (max
), offset_node
);
7865 /* We have a known offset into the string. Start searching there for
7866 a null character. */
7867 if (offset_node
== 0)
7871 /* Did we get a long long offset? If so, punt. */
7872 if (TREE_INT_CST_HIGH (offset_node
) != 0)
7874 offset
= TREE_INT_CST_LOW (offset_node
);
7876 /* If the offset is known to be out of bounds, warn, and call strlen at
7878 if (offset
< 0 || offset
> max
)
7880 warning ("offset outside bounds of constant string");
7883 /* Use strlen to search for the first zero byte. Since any strings
7884 constructed with build_string will have nulls appended, we win even
7885 if we get handed something like (char[4])"abcd".
7887 Since OFFSET is our starting index into the string, no further
7888 calculation is needed. */
7889 return size_int (strlen (ptr
+ offset
));
7893 expand_builtin_return_addr (fndecl_code
, count
, tem
)
7894 enum built_in_function fndecl_code
;
7900 /* Some machines need special handling before we can access
7901 arbitrary frames. For example, on the sparc, we must first flush
7902 all register windows to the stack. */
7903 #ifdef SETUP_FRAME_ADDRESSES
7904 SETUP_FRAME_ADDRESSES ();
7907 /* On the sparc, the return address is not in the frame, it is in a
7908 register. There is no way to access it off of the current frame
7909 pointer, but it can be accessed off the previous frame pointer by
7910 reading the value from the register window save area. */
7911 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7912 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
7916 /* Scan back COUNT frames to the specified frame. */
7917 for (i
= 0; i
< count
; i
++)
7919 /* Assume the dynamic chain pointer is in the word that the
7920 frame address points to, unless otherwise specified. */
7921 #ifdef DYNAMIC_CHAIN_ADDRESS
7922 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
7924 tem
= memory_address (Pmode
, tem
);
7925 tem
= copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
7928 /* For __builtin_frame_address, return what we've got. */
7929 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
7932 /* For __builtin_return_address, Get the return address from that
7934 #ifdef RETURN_ADDR_RTX
7935 tem
= RETURN_ADDR_RTX (count
, tem
);
7937 tem
= memory_address (Pmode
,
7938 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
7939 tem
= gen_rtx (MEM
, Pmode
, tem
);
7944 /* Expand an expression EXP that calls a built-in function,
7945 with result going to TARGET if that's convenient
7946 (and in mode MODE if that's convenient).
7947 SUBTARGET may be used as the target for computing one of EXP's operands.
7948 IGNORE is nonzero if the value is to be ignored. */
7950 #define CALLED_AS_BUILT_IN(NODE) \
7951 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7954 expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
7958 enum machine_mode mode
;
7961 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7962 tree arglist
= TREE_OPERAND (exp
, 1);
7965 enum machine_mode value_mode
= TYPE_MODE (TREE_TYPE (exp
));
7966 optab builtin_optab
;
7968 switch (DECL_FUNCTION_CODE (fndecl
))
7973 /* build_function_call changes these into ABS_EXPR. */
7978 /* Treat these like sqrt, but only if the user asks for them. */
7979 if (! flag_fast_math
)
7981 case BUILT_IN_FSQRT
:
7982 /* If not optimizing, call the library function. */
7987 /* Arg could be wrong type if user redeclared this fcn wrong. */
7988 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
)
7991 /* Stabilize and compute the argument. */
7992 if (TREE_CODE (TREE_VALUE (arglist
)) != VAR_DECL
7993 && TREE_CODE (TREE_VALUE (arglist
)) != PARM_DECL
)
7995 exp
= copy_node (exp
);
7996 arglist
= copy_node (arglist
);
7997 TREE_OPERAND (exp
, 1) = arglist
;
7998 TREE_VALUE (arglist
) = save_expr (TREE_VALUE (arglist
));
8000 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
8002 /* Make a suitable register to place result in. */
8003 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8008 switch (DECL_FUNCTION_CODE (fndecl
))
8011 builtin_optab
= sin_optab
; break;
8013 builtin_optab
= cos_optab
; break;
8014 case BUILT_IN_FSQRT
:
8015 builtin_optab
= sqrt_optab
; break;
8020 /* Compute into TARGET.
8021 Set TARGET to wherever the result comes back. */
8022 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8023 builtin_optab
, op0
, target
, 0);
8025 /* If we were unable to expand via the builtin, stop the
8026 sequence (without outputting the insns) and break, causing
8027 a call the the library function. */
8034 /* Check the results by default. But if flag_fast_math is turned on,
8035 then assume sqrt will always be called with valid arguments. */
8037 if (! flag_fast_math
)
8039 /* Don't define the builtin FP instructions
8040 if your machine is not IEEE. */
8041 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
8044 lab1
= gen_label_rtx ();
8046 /* Test the result; if it is NaN, set errno=EDOM because
8047 the argument was not in the domain. */
8048 emit_cmp_insn (target
, target
, EQ
, 0, GET_MODE (target
), 0, 0);
8049 emit_jump_insn (gen_beq (lab1
));
8053 #ifdef GEN_ERRNO_RTX
8054 rtx errno_rtx
= GEN_ERRNO_RTX
;
8057 = gen_rtx (MEM
, word_mode
, gen_rtx (SYMBOL_REF
, Pmode
, "errno"));
8060 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
8063 /* We can't set errno=EDOM directly; let the library call do it.
8064 Pop the arguments right away in case the call gets deleted. */
8066 expand_call (exp
, target
, 0);
8073 /* Output the entire sequence. */
8074 insns
= get_insns ();
8080 /* __builtin_apply_args returns block of memory allocated on
8081 the stack into which is stored the arg pointer, structure
8082 value address, static chain, and all the registers that might
8083 possibly be used in performing a function call. The code is
8084 moved to the start of the function so the incoming values are
8086 case BUILT_IN_APPLY_ARGS
:
8087 /* Don't do __builtin_apply_args more than once in a function.
8088 Save the result of the first call and reuse it. */
8089 if (apply_args_value
!= 0)
8090 return apply_args_value
;
8092 /* When this function is called, it means that registers must be
8093 saved on entry to this function. So we migrate the
8094 call to the first insn of this function. */
8099 temp
= expand_builtin_apply_args ();
8103 apply_args_value
= temp
;
8105 /* Put the sequence after the NOTE that starts the function.
8106 If this is inside a SEQUENCE, make the outer-level insn
8107 chain current, so the code is placed at the start of the
8109 push_topmost_sequence ();
8110 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8111 pop_topmost_sequence ();
8115 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8116 FUNCTION with a copy of the parameters described by
8117 ARGUMENTS, and ARGSIZE. It returns a block of memory
8118 allocated on the stack into which is stored all the registers
8119 that might possibly be used for returning the result of a
8120 function. ARGUMENTS is the value returned by
8121 __builtin_apply_args. ARGSIZE is the number of bytes of
8122 arguments that must be copied. ??? How should this value be
8123 computed? We'll also need a safe worst case value for varargs
8125 case BUILT_IN_APPLY
:
8127 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8128 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8129 || TREE_CHAIN (arglist
) == 0
8130 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8131 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8132 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8140 for (t
= arglist
, i
= 0; t
; t
= TREE_CHAIN (t
), i
++)
8141 ops
[i
] = expand_expr (TREE_VALUE (t
), NULL_RTX
, VOIDmode
, 0);
8143 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
8146 /* __builtin_return (RESULT) causes the function to return the
8147 value described by RESULT. RESULT is address of the block of
8148 memory returned by __builtin_apply. */
8149 case BUILT_IN_RETURN
:
8151 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8152 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
)
8153 expand_builtin_return (expand_expr (TREE_VALUE (arglist
),
8154 NULL_RTX
, VOIDmode
, 0));
8157 case BUILT_IN_SAVEREGS
:
8158 /* Don't do __builtin_saveregs more than once in a function.
8159 Save the result of the first call and reuse it. */
8160 if (saveregs_value
!= 0)
8161 return saveregs_value
;
8163 /* When this function is called, it means that registers must be
8164 saved on entry to this function. So we migrate the
8165 call to the first insn of this function. */
8169 /* Now really call the function. `expand_call' does not call
8170 expand_builtin, so there is no danger of infinite recursion here. */
8173 #ifdef EXPAND_BUILTIN_SAVEREGS
8174 /* Do whatever the machine needs done in this case. */
8175 temp
= EXPAND_BUILTIN_SAVEREGS (arglist
);
8177 /* The register where the function returns its value
8178 is likely to have something else in it, such as an argument.
8179 So preserve that register around the call. */
8181 if (value_mode
!= VOIDmode
)
8183 rtx valreg
= hard_libcall_value (value_mode
);
8184 rtx saved_valreg
= gen_reg_rtx (value_mode
);
8186 emit_move_insn (saved_valreg
, valreg
);
8187 temp
= expand_call (exp
, target
, ignore
);
8188 emit_move_insn (valreg
, saved_valreg
);
8191 /* Generate the call, putting the value in a pseudo. */
8192 temp
= expand_call (exp
, target
, ignore
);
8198 saveregs_value
= temp
;
8200 /* Put the sequence after the NOTE that starts the function.
8201 If this is inside a SEQUENCE, make the outer-level insn
8202 chain current, so the code is placed at the start of the
8204 push_topmost_sequence ();
8205 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8206 pop_topmost_sequence ();
8210 /* __builtin_args_info (N) returns word N of the arg space info
8211 for the current function. The number and meanings of words
8212 is controlled by the definition of CUMULATIVE_ARGS. */
8213 case BUILT_IN_ARGS_INFO
:
8215 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
8217 int *word_ptr
= (int *) ¤t_function_args_info
;
8218 tree type
, elts
, result
;
8220 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
8221 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8222 __FILE__
, __LINE__
);
8226 tree arg
= TREE_VALUE (arglist
);
8227 if (TREE_CODE (arg
) != INTEGER_CST
)
8228 error ("argument of `__builtin_args_info' must be constant");
8231 int wordnum
= TREE_INT_CST_LOW (arg
);
8233 if (wordnum
< 0 || wordnum
>= nwords
|| TREE_INT_CST_HIGH (arg
))
8234 error ("argument of `__builtin_args_info' out of range");
8236 return GEN_INT (word_ptr
[wordnum
]);
8240 error ("missing argument in `__builtin_args_info'");
8245 for (i
= 0; i
< nwords
; i
++)
8246 elts
= tree_cons (NULL_TREE
, build_int_2 (word_ptr
[i
], 0));
8248 type
= build_array_type (integer_type_node
,
8249 build_index_type (build_int_2 (nwords
, 0)));
8250 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, nreverse (elts
));
8251 TREE_CONSTANT (result
) = 1;
8252 TREE_STATIC (result
) = 1;
8253 result
= build (INDIRECT_REF
, build_pointer_type (type
), result
);
8254 TREE_CONSTANT (result
) = 1;
8255 return expand_expr (result
, NULL_RTX
, VOIDmode
, 0);
8259 /* Return the address of the first anonymous stack arg. */
8260 case BUILT_IN_NEXT_ARG
:
8262 tree fntype
= TREE_TYPE (current_function_decl
);
8264 if ((TYPE_ARG_TYPES (fntype
) == 0
8265 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
8267 && ! current_function_varargs
)
8269 error ("`va_start' used in function with fixed args");
8275 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
8276 tree arg
= TREE_VALUE (arglist
);
8278 /* Strip off all nops for the sake of the comparison. This
8279 is not quite the same as STRIP_NOPS. It does more.
8280 We must also strip off INDIRECT_EXPR for C++ reference
8282 while (TREE_CODE (arg
) == NOP_EXPR
8283 || TREE_CODE (arg
) == CONVERT_EXPR
8284 || TREE_CODE (arg
) == NON_LVALUE_EXPR
8285 || TREE_CODE (arg
) == INDIRECT_REF
)
8286 arg
= TREE_OPERAND (arg
, 0);
8287 if (arg
!= last_parm
)
8288 warning ("second parameter of `va_start' not last named argument");
8290 else if (! current_function_varargs
)
8291 /* Evidently an out of date version of <stdarg.h>; can't validate
8292 va_start's second argument, but can still work as intended. */
8293 warning ("`__builtin_next_arg' called without an argument");
8296 return expand_binop (Pmode
, add_optab
,
8297 current_function_internal_arg_pointer
,
8298 current_function_arg_offset_rtx
,
8299 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
8301 case BUILT_IN_CLASSIFY_TYPE
:
8304 tree type
= TREE_TYPE (TREE_VALUE (arglist
));
8305 enum tree_code code
= TREE_CODE (type
);
8306 if (code
== VOID_TYPE
)
8307 return GEN_INT (void_type_class
);
8308 if (code
== INTEGER_TYPE
)
8309 return GEN_INT (integer_type_class
);
8310 if (code
== CHAR_TYPE
)
8311 return GEN_INT (char_type_class
);
8312 if (code
== ENUMERAL_TYPE
)
8313 return GEN_INT (enumeral_type_class
);
8314 if (code
== BOOLEAN_TYPE
)
8315 return GEN_INT (boolean_type_class
);
8316 if (code
== POINTER_TYPE
)
8317 return GEN_INT (pointer_type_class
);
8318 if (code
== REFERENCE_TYPE
)
8319 return GEN_INT (reference_type_class
);
8320 if (code
== OFFSET_TYPE
)
8321 return GEN_INT (offset_type_class
);
8322 if (code
== REAL_TYPE
)
8323 return GEN_INT (real_type_class
);
8324 if (code
== COMPLEX_TYPE
)
8325 return GEN_INT (complex_type_class
);
8326 if (code
== FUNCTION_TYPE
)
8327 return GEN_INT (function_type_class
);
8328 if (code
== METHOD_TYPE
)
8329 return GEN_INT (method_type_class
);
8330 if (code
== RECORD_TYPE
)
8331 return GEN_INT (record_type_class
);
8332 if (code
== UNION_TYPE
|| code
== QUAL_UNION_TYPE
)
8333 return GEN_INT (union_type_class
);
8334 if (code
== ARRAY_TYPE
)
8336 if (TYPE_STRING_FLAG (type
))
8337 return GEN_INT (string_type_class
);
8339 return GEN_INT (array_type_class
);
8341 if (code
== SET_TYPE
)
8342 return GEN_INT (set_type_class
);
8343 if (code
== FILE_TYPE
)
8344 return GEN_INT (file_type_class
);
8345 if (code
== LANG_TYPE
)
8346 return GEN_INT (lang_type_class
);
8348 return GEN_INT (no_type_class
);
8350 case BUILT_IN_CONSTANT_P
:
8355 tree arg
= TREE_VALUE (arglist
);
8358 return (TREE_CODE_CLASS (TREE_CODE (arg
)) == 'c'
8359 || (TREE_CODE (arg
) == ADDR_EXPR
8360 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8361 ? const1_rtx
: const0_rtx
);
8364 case BUILT_IN_FRAME_ADDRESS
:
8365 /* The argument must be a nonnegative integer constant.
8366 It counts the number of frames to scan up the stack.
8367 The value is the address of that frame. */
8368 case BUILT_IN_RETURN_ADDRESS
:
8369 /* The argument must be a nonnegative integer constant.
8370 It counts the number of frames to scan up the stack.
8371 The value is the return address saved in that frame. */
8373 /* Warning about missing arg was already issued. */
8375 else if (TREE_CODE (TREE_VALUE (arglist
)) != INTEGER_CST
)
8377 error ("invalid arg to `__builtin_return_address'");
8380 else if (tree_int_cst_sgn (TREE_VALUE (arglist
)) < 0)
8382 error ("invalid arg to `__builtin_return_address'");
8387 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
8388 TREE_INT_CST_LOW (TREE_VALUE (arglist
)),
8389 hard_frame_pointer_rtx
);
8391 /* For __builtin_frame_address, return what we've got. */
8392 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
8395 if (GET_CODE (tem
) != REG
)
8396 tem
= copy_to_reg (tem
);
8400 case BUILT_IN_ALLOCA
:
8402 /* Arg could be non-integer if user redeclared this fcn wrong. */
8403 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8406 /* Compute the argument. */
8407 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
8409 /* Allocate the desired space. */
8410 return allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
8413 /* If not optimizing, call the library function. */
8414 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8418 /* Arg could be non-integer if user redeclared this fcn wrong. */
8419 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8422 /* Compute the argument. */
8423 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
8424 /* Compute ffs, into TARGET if possible.
8425 Set TARGET to wherever the result comes back. */
8426 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8427 ffs_optab
, op0
, target
, 1);
8432 case BUILT_IN_STRLEN
:
8433 /* If not optimizing, call the library function. */
8434 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8438 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8439 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8443 tree src
= TREE_VALUE (arglist
);
8444 tree len
= c_strlen (src
);
8447 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8449 rtx result
, src_rtx
, char_rtx
;
8450 enum machine_mode insn_mode
= value_mode
, char_mode
;
8451 enum insn_code icode
;
8453 /* If the length is known, just return it. */
8455 return expand_expr (len
, target
, mode
, 0);
8457 /* If SRC is not a pointer type, don't do this operation inline. */
8461 /* Call a function if we can't compute strlen in the right mode. */
8463 while (insn_mode
!= VOIDmode
)
8465 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
8466 if (icode
!= CODE_FOR_nothing
)
8469 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
8471 if (insn_mode
== VOIDmode
)
8474 /* Make a place to write the result of the instruction. */
8477 && GET_CODE (result
) == REG
8478 && GET_MODE (result
) == insn_mode
8479 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8480 result
= gen_reg_rtx (insn_mode
);
8482 /* Make sure the operands are acceptable to the predicates. */
8484 if (! (*insn_operand_predicate
[(int)icode
][0]) (result
, insn_mode
))
8485 result
= gen_reg_rtx (insn_mode
);
8487 src_rtx
= memory_address (BLKmode
,
8488 expand_expr (src
, NULL_RTX
, ptr_mode
,
8490 if (! (*insn_operand_predicate
[(int)icode
][1]) (src_rtx
, Pmode
))
8491 src_rtx
= copy_to_mode_reg (Pmode
, src_rtx
);
8493 char_rtx
= const0_rtx
;
8494 char_mode
= insn_operand_mode
[(int)icode
][2];
8495 if (! (*insn_operand_predicate
[(int)icode
][2]) (char_rtx
, char_mode
))
8496 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
8498 emit_insn (GEN_FCN (icode
) (result
,
8499 gen_rtx (MEM
, BLKmode
, src_rtx
),
8500 char_rtx
, GEN_INT (align
)));
8502 /* Return the value in the proper mode for this function. */
8503 if (GET_MODE (result
) == value_mode
)
8505 else if (target
!= 0)
8507 convert_move (target
, result
, 0);
8511 return convert_to_mode (value_mode
, result
, 0);
8514 case BUILT_IN_STRCPY
:
8515 /* If not optimizing, call the library function. */
8516 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8520 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8521 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8522 || TREE_CHAIN (arglist
) == 0
8523 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8527 tree len
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
8532 len
= size_binop (PLUS_EXPR
, len
, integer_one_node
);
8534 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8538 case BUILT_IN_MEMCPY
:
8539 /* If not optimizing, call the library function. */
8540 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8544 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8545 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8546 || TREE_CHAIN (arglist
) == 0
8547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8548 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8549 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8553 tree dest
= TREE_VALUE (arglist
);
8554 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
8555 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8559 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8561 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8562 rtx dest_rtx
, dest_mem
, src_mem
;
8564 /* If either SRC or DEST is not a pointer type, don't do
8565 this operation in-line. */
8566 if (src_align
== 0 || dest_align
== 0)
8568 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCPY
)
8569 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8573 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8574 dest_mem
= gen_rtx (MEM
, BLKmode
,
8575 memory_address (BLKmode
, dest_rtx
));
8576 /* There could be a void* cast on top of the object. */
8577 while (TREE_CODE (dest
) == NOP_EXPR
)
8578 dest
= TREE_OPERAND (dest
, 0);
8579 type
= TREE_TYPE (TREE_TYPE (dest
));
8580 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8581 src_mem
= gen_rtx (MEM
, BLKmode
,
8582 memory_address (BLKmode
,
8583 expand_expr (src
, NULL_RTX
,
8586 /* There could be a void* cast on top of the object. */
8587 while (TREE_CODE (src
) == NOP_EXPR
)
8588 src
= TREE_OPERAND (src
, 0);
8589 type
= TREE_TYPE (TREE_TYPE (src
));
8590 MEM_IN_STRUCT_P (src_mem
) = AGGREGATE_TYPE_P (type
);
8592 /* Copy word part most expediently. */
8593 emit_block_move (dest_mem
, src_mem
,
8594 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8595 MIN (src_align
, dest_align
));
8596 return force_operand (dest_rtx
, NULL_RTX
);
8599 case BUILT_IN_MEMSET
:
8600 /* If not optimizing, call the library function. */
8601 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8605 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8606 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8607 || TREE_CHAIN (arglist
) == 0
8608 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
))))
8610 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8612 != (TREE_CODE (TREE_TYPE
8614 (TREE_CHAIN (TREE_CHAIN (arglist
))))))))
8618 tree dest
= TREE_VALUE (arglist
);
8619 tree val
= TREE_VALUE (TREE_CHAIN (arglist
));
8620 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8624 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8625 rtx dest_rtx
, dest_mem
;
8627 /* If DEST is not a pointer type, don't do this
8628 operation in-line. */
8629 if (dest_align
== 0)
8632 /* If VAL is not 0, don't do this operation in-line. */
8633 if (expand_expr (val
, NULL_RTX
, VOIDmode
, 0) != const0_rtx
)
8636 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8637 dest_mem
= gen_rtx (MEM
, BLKmode
,
8638 memory_address (BLKmode
, dest_rtx
));
8639 /* There could be a void* cast on top of the object. */
8640 while (TREE_CODE (dest
) == NOP_EXPR
)
8641 dest
= TREE_OPERAND (dest
, 0);
8642 type
= TREE_TYPE (TREE_TYPE (dest
));
8643 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8645 clear_storage (dest_mem
, expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8648 return force_operand (dest_rtx
, NULL_RTX
);
8651 /* These comparison functions need an instruction that returns an actual
8652 index. An ordinary compare that just sets the condition codes
8654 #ifdef HAVE_cmpstrsi
8655 case BUILT_IN_STRCMP
:
8656 /* If not optimizing, call the library function. */
8657 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8661 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8662 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8663 || TREE_CHAIN (arglist
) == 0
8664 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8666 else if (!HAVE_cmpstrsi
)
8669 tree arg1
= TREE_VALUE (arglist
);
8670 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8674 len
= c_strlen (arg1
);
8676 len
= size_binop (PLUS_EXPR
, integer_one_node
, len
);
8677 len2
= c_strlen (arg2
);
8679 len2
= size_binop (PLUS_EXPR
, integer_one_node
, len2
);
8681 /* If we don't have a constant length for the first, use the length
8682 of the second, if we know it. We don't require a constant for
8683 this case; some cost analysis could be done if both are available
8684 but neither is constant. For now, assume they're equally cheap.
8686 If both strings have constant lengths, use the smaller. This
8687 could arise if optimization results in strcpy being called with
8688 two fixed strings, or if the code was machine-generated. We should
8689 add some code to the `memcmp' handler below to deal with such
8690 situations, someday. */
8691 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
8698 else if (len2
&& TREE_CODE (len2
) == INTEGER_CST
)
8700 if (tree_int_cst_lt (len2
, len
))
8704 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8708 case BUILT_IN_MEMCMP
:
8709 /* If not optimizing, call the library function. */
8710 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8714 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8715 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8716 || TREE_CHAIN (arglist
) == 0
8717 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8718 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8719 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8721 else if (!HAVE_cmpstrsi
)
8724 tree arg1
= TREE_VALUE (arglist
);
8725 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8726 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8730 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8732 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8733 enum machine_mode insn_mode
8734 = insn_operand_mode
[(int) CODE_FOR_cmpstrsi
][0];
8736 /* If we don't have POINTER_TYPE, call the function. */
8737 if (arg1_align
== 0 || arg2_align
== 0)
8739 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCMP
)
8740 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8744 /* Make a place to write the result of the instruction. */
8747 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
8748 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8749 result
= gen_reg_rtx (insn_mode
);
8751 emit_insn (gen_cmpstrsi (result
,
8752 gen_rtx (MEM
, BLKmode
,
8753 expand_expr (arg1
, NULL_RTX
,
8756 gen_rtx (MEM
, BLKmode
,
8757 expand_expr (arg2
, NULL_RTX
,
8760 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8761 GEN_INT (MIN (arg1_align
, arg2_align
))));
8763 /* Return the value in the proper mode for this function. */
8764 mode
= TYPE_MODE (TREE_TYPE (exp
));
8765 if (GET_MODE (result
) == mode
)
8767 else if (target
!= 0)
8769 convert_move (target
, result
, 0);
8773 return convert_to_mode (mode
, result
, 0);
8776 case BUILT_IN_STRCMP
:
8777 case BUILT_IN_MEMCMP
:
8781 /* __builtin_setjmp is passed a pointer to an array of five words
8782 (not all will be used on all machines). It operates similarly to
8783 the C library function of the same name, but is more efficient.
8784 Much of the code below (and for longjmp) is copied from the handling
8787 NOTE: This is intended for use by GNAT and will only work in
8788 the method used by it. This code will likely NOT survive to
8789 the GCC 2.8.0 release. */
8790 case BUILT_IN_SETJMP
:
8792 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8796 rtx buf_addr
= expand_expr (TREE_VALUE (arglist
), subtarget
,
8798 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
8799 enum machine_mode sa_mode
= Pmode
;
8801 int old_inhibit_defer_pop
= inhibit_defer_pop
;
8802 int return_pops
= RETURN_POPS_ARGS (get_identifier ("__dummy"),
8803 get_identifier ("__dummy"), 0);
8805 CUMULATIVE_ARGS args_so_far
;
8808 #ifdef POINTERS_EXTEND_UNSIGNED
8809 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
8812 buf_addr
= force_reg (Pmode
, buf_addr
);
8814 if (target
== 0 || GET_CODE (target
) != REG
8815 || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
8816 target
= gen_reg_rtx (value_mode
);
8820 CONST_CALL_P (emit_note (NULL_PTR
, NOTE_INSN_SETJMP
)) = 1;
8821 current_function_calls_setjmp
= 1;
8823 /* We store the frame pointer and the address of lab1 in the buffer
8824 and use the rest of it for the stack save area, which is
8825 machine-dependent. */
8826 emit_move_insn (gen_rtx (MEM
, Pmode
, buf_addr
),
8827 virtual_stack_vars_rtx
);
8829 (validize_mem (gen_rtx (MEM
, Pmode
,
8830 plus_constant (buf_addr
,
8831 GET_MODE_SIZE (Pmode
)))),
8832 gen_rtx (LABEL_REF
, Pmode
, lab1
));
8834 #ifdef HAVE_save_stack_nonlocal
8835 if (HAVE_save_stack_nonlocal
)
8836 sa_mode
= insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0];
8839 stack_save
= gen_rtx (MEM
, sa_mode
,
8840 plus_constant (buf_addr
,
8841 2 * GET_MODE_SIZE (Pmode
)));
8842 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
8846 emit_insn (gen_setjmp ());
8849 /* Set TARGET to zero and branch around the other case. */
8850 emit_move_insn (target
, const0_rtx
);
8851 emit_jump_insn (gen_jump (lab2
));
8855 /* Note that setjmp clobbers FP when we get here, so we have to
8856 make sure it's marked as used by this function. */
8857 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
8859 /* Mark the static chain as clobbered here so life information
8860 doesn't get messed up for it. */
8861 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, static_chain_rtx
));
8863 /* Now put in the code to restore the frame pointer, and argument
8864 pointer, if needed. The code below is from expand_end_bindings
8865 in stmt.c; see detailed documentation there. */
8866 #ifdef HAVE_nonlocal_goto
8867 if (! HAVE_nonlocal_goto
)
8869 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
8871 current_function_has_nonlocal_goto
= 1;
8873 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8874 if (fixed_regs
[ARG_POINTER_REGNUM
])
8876 #ifdef ELIMINABLE_REGS
8877 static struct elims
{int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
8879 for (i
= 0; i
< sizeof elim_regs
/ sizeof elim_regs
[0]; i
++)
8880 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
8881 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
8884 if (i
== sizeof elim_regs
/ sizeof elim_regs
[0])
8887 /* Now restore our arg pointer from the address at which it
8888 was saved in our stack frame.
8889 If there hasn't been space allocated for it yet, make
8891 if (arg_pointer_save_area
== 0)
8892 arg_pointer_save_area
8893 = assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
8894 emit_move_insn (virtual_incoming_args_rtx
,
8895 copy_to_reg (arg_pointer_save_area
));
8900 #ifdef HAVE_nonlocal_goto_receiver
8901 if (HAVE_nonlocal_goto_receiver
)
8902 emit_insn (gen_nonlocal_goto_receiver ());
8904 /* The static chain pointer contains the address of dummy function.
8905 We need to call it here to handle some PIC cases of restoring
8906 a global pointer. Then return 1. */
8907 op0
= copy_to_mode_reg (Pmode
, static_chain_rtx
);
8909 /* We can't actually call emit_library_call here, so do everything
8910 it does, which isn't much for a libfunc with no args. */
8911 op0
= memory_address (FUNCTION_MODE
, op0
);
8913 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
,
8914 gen_rtx (SYMBOL_REF
, Pmode
, "__dummy"), 1);
8915 next_arg_reg
= FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1);
8917 #ifndef ACCUMULATE_OUTGOING_ARGS
8918 #ifdef HAVE_call_pop
8920 emit_call_insn (gen_call_pop (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8921 const0_rtx
, next_arg_reg
,
8922 GEN_INT (return_pops
)));
8929 emit_call_insn (gen_call (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8930 const0_rtx
, next_arg_reg
, const0_rtx
));
8935 emit_move_insn (target
, const1_rtx
);
8940 /* __builtin_longjmp is passed a pointer to an array of five words
8941 and a value, which is a dummy. It's similar to the C library longjmp
8942 function but works with __builtin_setjmp above. */
8943 case BUILT_IN_LONGJMP
:
8944 if (arglist
== 0 || TREE_CHAIN (arglist
) == 0
8945 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8949 tree dummy_id
= get_identifier ("__dummy");
8950 tree dummy_type
= build_function_type (void_type_node
, NULL_TREE
);
8951 tree dummy_decl
= build_decl (FUNCTION_DECL
, dummy_id
, dummy_type
);
8952 #ifdef POINTERS_EXTEND_UNSIGNED
8955 convert_memory_address
8957 expand_expr (TREE_VALUE (arglist
),
8958 NULL_RTX
, VOIDmode
, 0)));
8961 = force_reg (Pmode
, expand_expr (TREE_VALUE (arglist
),
8965 rtx fp
= gen_rtx (MEM
, Pmode
, buf_addr
);
8966 rtx lab
= gen_rtx (MEM
, Pmode
,
8967 plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
)));
8968 enum machine_mode sa_mode
8969 #ifdef HAVE_save_stack_nonlocal
8970 = (HAVE_save_stack_nonlocal
8971 ? insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0]
8976 rtx stack
= gen_rtx (MEM
, sa_mode
,
8977 plus_constant (buf_addr
,
8978 2 * GET_MODE_SIZE (Pmode
)));
8980 DECL_EXTERNAL (dummy_decl
) = 1;
8981 TREE_PUBLIC (dummy_decl
) = 1;
8982 make_decl_rtl (dummy_decl
, NULL_PTR
, 1);
8984 /* Expand the second expression just for side-effects. */
8985 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)),
8986 const0_rtx
, VOIDmode
, 0);
8988 assemble_external (dummy_decl
);
8990 /* Pick up FP, label, and SP from the block and jump. This code is
8991 from expand_goto in stmt.c; see there for detailed comments. */
8992 #if HAVE_nonlocal_goto
8993 if (HAVE_nonlocal_goto
)
8994 emit_insn (gen_nonlocal_goto (fp
, lab
, stack
,
8995 XEXP (DECL_RTL (dummy_decl
), 0)));
8999 lab
= copy_to_reg (lab
);
9000 emit_move_insn (hard_frame_pointer_rtx
, fp
);
9001 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
9003 /* Put in the static chain register the address of the dummy
9005 emit_move_insn (static_chain_rtx
, XEXP (DECL_RTL (dummy_decl
), 0));
9006 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
9007 emit_insn (gen_rtx (USE
, VOIDmode
, stack_pointer_rtx
));
9008 emit_insn (gen_rtx (USE
, VOIDmode
, static_chain_rtx
));
9009 emit_indirect_jump (lab
);
9015 default: /* just do library call, if unknown builtin */
9016 error ("built-in function `%s' not currently supported",
9017 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9020 /* The switch statement above can drop through to cause the function
9021 to be called normally. */
9023 return expand_call (exp
, target
, ignore
);
9026 /* Built-in functions to perform an untyped call and return. */
9028 /* For each register that may be used for calling a function, this
9029 gives a mode used to copy the register's value. VOIDmode indicates
9030 the register is not used for calling a function. If the machine
9031 has register windows, this gives only the outbound registers.
9032 INCOMING_REGNO gives the corresponding inbound register. */
/* Lazily filled in by apply_args_size () below: the mode chosen for
   copying each argument register's value, or VOIDmode for registers
   not used in calling a function.  */
9033 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
9035 /* For each register that may be used for returning values, this gives
9036 a mode used to copy the register's value. VOIDmode indicates the
9037 register is not used for returning values. If the machine has
9038 register windows, this gives only the outbound registers.
9039 INCOMING_REGNO gives the corresponding inbound register. */
/* Lazily filled in by apply_result_size () below: the mode chosen for
   copying each value-return register, or VOIDmode for registers not
   used for returning values.  */
9040 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
9042 /* For each register that may be used for calling a function, this
9043 gives the offset of that register into the block returned by
9044 __builtin_apply_args. 0 indicates that the register is not
9045 used for calling a function. */
/* Lazily filled in by apply_args_size (): byte offset of each argument
   register within the block returned by __builtin_apply_args (0 for
   registers not used for calling a function).  Read by
   apply_args_register_offset below.  */
9046 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
9048 /* Return the offset of register REGNO into the block returned by
9049 __builtin_apply_args. This is not declared static, since it is
9050 needed in objc-act.c. */
/* Return the byte offset of hard register REGNO inside the block that
   __builtin_apply_args returns, as recorded in apply_args_reg_offset.
   Deliberately non-static; see the comment above about objc-act.c.  */
/* NOTE(review): the source extraction dropped interior lines here
   (original lines 9052, 9054-9057 and 9062: the return type, the
   parameter declaration, braces, and the #endif matching the #ifdef
   below -- confirm against the full source).  The surviving tokens
   are preserved byte-for-byte.  */
9053 apply_args_register_offset (regno
)
9058 /* Arguments are always put in outgoing registers (in the argument
9059 block) if such make sense. */
9060 #ifdef OUTGOING_REGNO
/* On targets that distinguish them (e.g. register windows), map the
   incoming register number to its outgoing counterpart before
   indexing the offset table.  */
9061 regno
= OUTGOING_REGNO(regno
);
9063 return apply_args_reg_offset
[regno
];
9066 /* Return the size required for the block returned by __builtin_apply_args,
9067 and initialize apply_args_mode. */
/* Body of apply_args_size: compute the size in bytes of the block
   returned by __builtin_apply_args, and as a side effect record each
   register's copy mode in apply_args_mode and its byte offset in
   apply_args_reg_offset.  SIZE is static because, per the comment
   below, the computed values never change.  */
/* NOTE(review): the extraction dropped this function's header
   (original lines 9068-9071) and several interior lines (braces, loop
   upper bounds, declarations of `regno' and `align', and presumably
   an early return of the cached SIZE -- verify against the full
   source).  The surviving tokens are preserved byte-for-byte.  */
9072 static int size
= -1;
9074 enum machine_mode mode
;
9076 /* The values computed by this function never change. */
9079 /* The first value is the incoming arg-pointer. */
9080 size
= GET_MODE_SIZE (Pmode
);
9082 /* The second value is the structure value address unless this is
9083 passed as an "invisible" first argument. */
9084 if (struct_value_rtx
)
9085 size
+= GET_MODE_SIZE (Pmode
);
/* Reserve a slot for every hard register that can carry a function
   argument.  */
9087 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9088 if (FUNCTION_ARG_REGNO_P (regno
))
9090 /* Search for the proper mode for copying this register's
9091 value. I'm not sure this is right, but it works so far. */
9092 enum machine_mode best_mode
= VOIDmode
;
/* First pass: integer modes that occupy exactly one hard register.  */
9094 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9096 mode
= GET_MODE_WIDER_MODE (mode
))
9097 if (HARD_REGNO_MODE_OK (regno
, mode
)
9098 && HARD_REGNO_NREGS (regno
, mode
) == 1)
/* Second pass: fall back to float modes the target can actually
   move (mov_optab has a real insn for them).  */
9101 if (best_mode
== VOIDmode
)
9102 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9104 mode
= GET_MODE_WIDER_MODE (mode
))
9105 if (HARD_REGNO_MODE_OK (regno
, mode
)
9106 && (mov_optab
->handlers
[(int) mode
].insn_code
9107 != CODE_FOR_nothing
))
9111 if (mode
== VOIDmode
)
/* Round SIZE up to the chosen mode's alignment, then record this
   register's offset and advance past its slot.  */
9114 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9115 if (size
% align
!= 0)
9116 size
= CEIL (size
, align
) * align
;
9117 apply_args_reg_offset
[regno
] = size
;
9118 size
+= GET_MODE_SIZE (mode
);
9119 apply_args_mode
[regno
] = mode
;
/* Registers never used for arguments get VOIDmode and offset 0.  */
9123 apply_args_mode
[regno
] = VOIDmode
;
9124 apply_args_reg_offset
[regno
] = 0;
9130 /* Return the size required for the block returned by __builtin_apply,
9131 and initialize apply_result_mode. */
9134 apply_result_size ()
9136 static int size
= -1;
9138 enum machine_mode mode
;
9140 /* The values computed by this function never change. */
9145 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9146 if (FUNCTION_VALUE_REGNO_P (regno
))
9148 /* Search for the proper mode for copying this register's
9149 value. I'm not sure this is right, but it works so far. */
9150 enum machine_mode best_mode
= VOIDmode
;
9152 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9154 mode
= GET_MODE_WIDER_MODE (mode
))
9155 if (HARD_REGNO_MODE_OK (regno
, mode
))
9158 if (best_mode
== VOIDmode
)
9159 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9161 mode
= GET_MODE_WIDER_MODE (mode
))
9162 if (HARD_REGNO_MODE_OK (regno
, mode
)
9163 && (mov_optab
->handlers
[(int) mode
].insn_code
9164 != CODE_FOR_nothing
))
9168 if (mode
== VOIDmode
)
9171 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9172 if (size
% align
!= 0)
9173 size
= CEIL (size
, align
) * align
;
9174 size
+= GET_MODE_SIZE (mode
);
9175 apply_result_mode
[regno
] = mode
;
9178 apply_result_mode
[regno
] = VOIDmode
;
9180 /* Allow targets that use untyped_call and untyped_return to override
9181 the size so that machine-specific information can be stored here. */
9182 #ifdef APPLY_RESULT_SIZE
9183 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        /* Keep each saved value aligned within the block.  */
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        /* When saving we use the outbound register; when restoring,
           the corresponding inbound one.  */
        reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx (SET, VOIDmode, mem, reg)
                            : gen_rtx (SET, VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
9223 /* Save the state required to perform an untyped call with the same
9224 arguments as were passed to the current function. */
9227 expand_builtin_apply_args ()
9230 int size
, align
, regno
;
9231 enum machine_mode mode
;
9233 /* Create a block where the arg-pointer, structure value address,
9234 and argument registers can be saved. */
9235 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
9237 /* Walk past the arg-pointer and structure value address. */
9238 size
= GET_MODE_SIZE (Pmode
);
9239 if (struct_value_rtx
)
9240 size
+= GET_MODE_SIZE (Pmode
);
9242 /* Save each register used in calling a function to the block. */
9243 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9244 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
9248 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9249 if (size
% align
!= 0)
9250 size
= CEIL (size
, align
) * align
;
9252 tem
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9255 /* For reg-stack.c's stack register household.
9256 Compare with a similar piece of code in function.c. */
9258 emit_insn (gen_rtx (USE
, mode
, tem
));
9261 emit_move_insn (change_address (registers
, mode
,
9262 plus_constant (XEXP (registers
, 0),
9265 size
+= GET_MODE_SIZE (mode
);
9268 /* Save the arg pointer to the block. */
9269 emit_move_insn (change_address (registers
, Pmode
, XEXP (registers
, 0)),
9270 copy_to_reg (virtual_incoming_args_rtx
));
9271 size
= GET_MODE_SIZE (Pmode
);
9273 /* Save the structure value address unless this is passed as an
9274 "invisible" first argument. */
9275 if (struct_value_incoming_rtx
)
9277 emit_move_insn (change_address (registers
, Pmode
,
9278 plus_constant (XEXP (registers
, 0),
9280 copy_to_reg (struct_value_incoming_rtx
));
9281 size
+= GET_MODE_SIZE (Pmode
);
9284 /* Return the address of the block. */
9285 return copy_addr_to_reg (XEXP (registers
, 0));
9288 /* Perform an untyped call and save the state required to perform an
9289 untyped return of whatever value was returned by the given function. */
9292 expand_builtin_apply (function
, arguments
, argsize
)
9293 rtx function
, arguments
, argsize
;
9295 int size
, align
, regno
;
9296 enum machine_mode mode
;
9297 rtx incoming_args
, result
, reg
, dest
, call_insn
;
9298 rtx old_stack_level
= 0;
9299 rtx call_fusage
= 0;
9301 /* Create a block where the return registers can be saved. */
9302 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
9304 /* ??? The argsize value should be adjusted here. */
9306 /* Fetch the arg pointer from the ARGUMENTS block. */
9307 incoming_args
= gen_reg_rtx (Pmode
);
9308 emit_move_insn (incoming_args
,
9309 gen_rtx (MEM
, Pmode
, arguments
));
9310 #ifndef STACK_GROWS_DOWNWARD
9311 incoming_args
= expand_binop (Pmode
, sub_optab
, incoming_args
, argsize
,
9312 incoming_args
, 0, OPTAB_LIB_WIDEN
);
9315 /* Perform postincrements before actually calling the function. */
9318 /* Push a new argument block and copy the arguments. */
9319 do_pending_stack_adjust ();
9320 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
9322 /* Push a block of memory onto the stack to store the memory arguments.
9323 Save the address in a register, and copy the memory arguments. ??? I
9324 haven't figured out how the calling convention macros effect this,
9325 but it's likely that the source and/or destination addresses in
9326 the block copy will need updating in machine specific ways. */
9327 dest
= copy_addr_to_reg (push_block (argsize
, 0, 0));
9328 emit_block_move (gen_rtx (MEM
, BLKmode
, dest
),
9329 gen_rtx (MEM
, BLKmode
, incoming_args
),
9331 PARM_BOUNDARY
/ BITS_PER_UNIT
);
9333 /* Refer to the argument block. */
9335 arguments
= gen_rtx (MEM
, BLKmode
, arguments
);
9337 /* Walk past the arg-pointer and structure value address. */
9338 size
= GET_MODE_SIZE (Pmode
);
9339 if (struct_value_rtx
)
9340 size
+= GET_MODE_SIZE (Pmode
);
9342 /* Restore each of the registers previously saved. Make USE insns
9343 for each of these registers for use in making the call. */
9344 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9345 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
9347 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9348 if (size
% align
!= 0)
9349 size
= CEIL (size
, align
) * align
;
9350 reg
= gen_rtx (REG
, mode
, regno
);
9351 emit_move_insn (reg
,
9352 change_address (arguments
, mode
,
9353 plus_constant (XEXP (arguments
, 0),
9356 use_reg (&call_fusage
, reg
);
9357 size
+= GET_MODE_SIZE (mode
);
9360 /* Restore the structure value address unless this is passed as an
9361 "invisible" first argument. */
9362 size
= GET_MODE_SIZE (Pmode
);
9363 if (struct_value_rtx
)
9365 rtx value
= gen_reg_rtx (Pmode
);
9366 emit_move_insn (value
,
9367 change_address (arguments
, Pmode
,
9368 plus_constant (XEXP (arguments
, 0),
9370 emit_move_insn (struct_value_rtx
, value
);
9371 if (GET_CODE (struct_value_rtx
) == REG
)
9372 use_reg (&call_fusage
, struct_value_rtx
);
9373 size
+= GET_MODE_SIZE (Pmode
);
9376 /* All arguments and registers used for the call are set up by now! */
9377 function
= prepare_call_address (function
, NULL_TREE
, &call_fusage
, 0);
9379 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9380 and we don't want to load it into a register as an optimization,
9381 because prepare_call_address already did it if it should be done. */
9382 if (GET_CODE (function
) != SYMBOL_REF
)
9383 function
= memory_address (FUNCTION_MODE
, function
);
9385 /* Generate the actual call instruction and save the return value. */
9386 #ifdef HAVE_untyped_call
9387 if (HAVE_untyped_call
)
9388 emit_call_insn (gen_untyped_call (gen_rtx (MEM
, FUNCTION_MODE
, function
),
9389 result
, result_vector (1, result
)));
9392 #ifdef HAVE_call_value
9393 if (HAVE_call_value
)
9397 /* Locate the unique return register. It is not possible to
9398 express a call that sets more than one return register using
9399 call_value; use untyped_call for that. In fact, untyped_call
9400 only needs to save the return registers in the given block. */
9401 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9402 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
9405 abort (); /* HAVE_untyped_call required. */
9406 valreg
= gen_rtx (REG
, mode
, regno
);
9409 emit_call_insn (gen_call_value (valreg
,
9410 gen_rtx (MEM
, FUNCTION_MODE
, function
),
9411 const0_rtx
, NULL_RTX
, const0_rtx
));
9413 emit_move_insn (change_address (result
, GET_MODE (valreg
),
9421 /* Find the CALL insn we just emitted. */
9422 for (call_insn
= get_last_insn ();
9423 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
9424 call_insn
= PREV_INSN (call_insn
))
9430 /* Put the register usage information on the CALL. If there is already
9431 some usage information, put ours at the end. */
9432 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
9436 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
9437 link
= XEXP (link
, 1))
9440 XEXP (link
, 1) = call_fusage
;
9443 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
9445 /* Restore the stack. */
9446 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
9448 /* Return the address of the result block. */
9449 return copy_addr_to_reg (XEXP (result
, 0));
9452 /* Perform an untyped return. */
9455 expand_builtin_return (result
)
9458 int size
, align
, regno
;
9459 enum machine_mode mode
;
9461 rtx call_fusage
= 0;
9463 apply_result_size ();
9464 result
= gen_rtx (MEM
, BLKmode
, result
);
9466 #ifdef HAVE_untyped_return
9467 if (HAVE_untyped_return
)
9469 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
9475 /* Restore the return value and note that each value is used. */
9477 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9478 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
9480 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9481 if (size
% align
!= 0)
9482 size
= CEIL (size
, align
) * align
;
9483 reg
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9484 emit_move_insn (reg
,
9485 change_address (result
, mode
,
9486 plus_constant (XEXP (result
, 0),
9489 push_to_sequence (call_fusage
);
9490 emit_insn (gen_rtx (USE
, VOIDmode
, reg
));
9491 call_fusage
= get_insns ();
9493 size
+= GET_MODE_SIZE (mode
);
9496 /* Put the USE insns before the return. */
9497 emit_insns (call_fusage
);
9499 /* Return whatever values was restored by jumping directly to the end
9501 expand_null_return ();
9504 /* Expand code for a post- or pre- increment or decrement
9505 and return the RTX for the result.
9506 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9509 expand_increment (exp
, post
, ignore
)
9513 register rtx op0
, op1
;
9514 register rtx temp
, value
;
9515 register tree incremented
= TREE_OPERAND (exp
, 0);
9516 optab this_optab
= add_optab
;
9518 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9519 int op0_is_copy
= 0;
9520 int single_insn
= 0;
9521 /* 1 means we can't store into OP0 directly,
9522 because it is a subreg narrower than a word,
9523 and we don't dare clobber the rest of the word. */
9526 if (output_bytecode
)
9528 bc_expand_expr (exp
);
9532 /* Stabilize any component ref that might need to be
9533 evaluated more than once below. */
9535 || TREE_CODE (incremented
) == BIT_FIELD_REF
9536 || (TREE_CODE (incremented
) == COMPONENT_REF
9537 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9538 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9539 incremented
= stabilize_reference (incremented
);
9540 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9541 ones into save exprs so that they don't accidentally get evaluated
9542 more than once by the code below. */
9543 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9544 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9545 incremented
= save_expr (incremented
);
9547 /* Compute the operands as RTX.
9548 Note whether OP0 is the actual lvalue or a copy of it:
9549 I believe it is a copy iff it is a register or subreg
9550 and insns were generated in computing it. */
9552 temp
= get_last_insn ();
9553 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9555 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9556 in place but instead must do sign- or zero-extension during assignment,
9557 so we copy it into a new register and let the code below use it as
9560 Note that we can safely modify this SUBREG since it is know not to be
9561 shared (it was made by the expand_expr call above). */
9563 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9566 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9570 else if (GET_CODE (op0
) == SUBREG
9571 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9573 /* We cannot increment this SUBREG in place. If we are
9574 post-incrementing, get a copy of the old value. Otherwise,
9575 just mark that we cannot increment in place. */
9577 op0
= copy_to_reg (op0
);
9582 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9583 && temp
!= get_last_insn ());
9584 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9586 /* Decide whether incrementing or decrementing. */
9587 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9588 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9589 this_optab
= sub_optab
;
9591 /* Convert decrement by a constant into a negative increment. */
9592 if (this_optab
== sub_optab
9593 && GET_CODE (op1
) == CONST_INT
)
9595 op1
= GEN_INT (- INTVAL (op1
));
9596 this_optab
= add_optab
;
9599 /* For a preincrement, see if we can do this with a single instruction. */
9602 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9603 if (icode
!= (int) CODE_FOR_nothing
9604 /* Make sure that OP0 is valid for operands 0 and 1
9605 of the insn we want to queue. */
9606 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9607 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
)
9608 && (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9612 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9613 then we cannot just increment OP0. We must therefore contrive to
9614 increment the original value. Then, for postincrement, we can return
9615 OP0 since it is a copy of the old value. For preincrement, expand here
9616 unless we can do it with a single insn.
9618 Likewise if storing directly into OP0 would clobber high bits
9619 we need to preserve (bad_subreg). */
9620 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9622 /* This is the easiest way to increment the value wherever it is.
9623 Problems with multiple evaluation of INCREMENTED are prevented
9624 because either (1) it is a component_ref or preincrement,
9625 in which case it was stabilized above, or (2) it is an array_ref
9626 with constant index in an array in a register, which is
9627 safe to reevaluate. */
9628 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9629 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9630 ? MINUS_EXPR
: PLUS_EXPR
),
9633 TREE_OPERAND (exp
, 1));
9635 while (TREE_CODE (incremented
) == NOP_EXPR
9636 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9638 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9639 incremented
= TREE_OPERAND (incremented
, 0);
9642 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9643 return post
? op0
: temp
;
9648 /* We have a true reference to the value in OP0.
9649 If there is an insn to add or subtract in this mode, queue it.
9650 Queueing the increment insn avoids the register shuffling
9651 that often results if we must increment now and first save
9652 the old value for subsequent use. */
9654 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9655 op0
= stabilize (op0
);
9658 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9659 if (icode
!= (int) CODE_FOR_nothing
9660 /* Make sure that OP0 is valid for operands 0 and 1
9661 of the insn we want to queue. */
9662 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9663 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
))
9665 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9666 op1
= force_reg (mode
, op1
);
9668 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9670 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9672 rtx addr
= force_reg (Pmode
, XEXP (op0
, 0));
9675 op0
= change_address (op0
, VOIDmode
, addr
);
9676 temp
= force_reg (GET_MODE (op0
), op0
);
9677 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9678 op1
= force_reg (mode
, op1
);
9680 /* The increment queue is LIFO, thus we have to `queue'
9681 the instructions in reverse order. */
9682 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9683 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9688 /* Preincrement, or we can't increment with one simple insn. */
9690 /* Save a copy of the value before inc or dec, to return it later. */
9691 temp
= value
= copy_to_reg (op0
);
9693 /* Arrange to return the incremented value. */
9694 /* Copy the rtx because expand_binop will protect from the queue,
9695 and the results of that would be invalid for us to return
9696 if our caller does emit_queue before using our result. */
9697 temp
= copy_rtx (value
= op0
);
9699 /* Increment however we can. */
9700 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9701 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9702 /* Make sure the value is stored into OP0. */
9704 emit_move_insn (op0
, op1
);
9709 /* Expand all function calls contained within EXP, innermost ones first.
9710 But don't look within expressions that have sequence points.
9711 For each CALL_EXPR, record the rtx for its value
9712 in the CALL_EXPR_RTL field. */
9715 preexpand_calls (exp
)
9718 register int nops
, i
;
9719 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
9721 if (! do_preexpand_calls
)
9724 /* Only expressions and references can contain calls. */
9726 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
9729 switch (TREE_CODE (exp
))
9732 /* Do nothing if already expanded. */
9733 if (CALL_EXPR_RTL (exp
) != 0
9734 /* Do nothing if the call returns a variable-sized object. */
9735 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp
))) != INTEGER_CST
9736 /* Do nothing to built-in functions. */
9737 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9738 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9740 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
9743 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9748 case TRUTH_ANDIF_EXPR
:
9749 case TRUTH_ORIF_EXPR
:
9750 /* If we find one of these, then we can be sure
9751 the adjust will be done for it (since it makes jumps).
9752 Do it now, so that if this is inside an argument
9753 of a function, we don't get the stack adjustment
9754 after some other args have already been pushed. */
9755 do_pending_stack_adjust ();
9760 case WITH_CLEANUP_EXPR
:
9761 case CLEANUP_POINT_EXPR
:
9765 if (SAVE_EXPR_RTL (exp
) != 0)
9769 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
9770 for (i
= 0; i
< nops
; i
++)
9771 if (TREE_OPERAND (exp
, i
) != 0)
9773 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9774 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
9776 preexpand_calls (TREE_OPERAND (exp
, i
));
9780 /* At the start of a function, record that we have no previously-pushed
9781 arguments waiting to be popped. */
9784 init_pending_stack_adjust ()
9786 pending_stack_adjust
= 0;
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.
   NOTE(review): the leading condition of the `if' was lost in extraction
   and reconstructed as `optimize > 0' -- verify against upstream.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
9804 /* Pop any previously-pushed arguments that have not been popped yet. */
9807 do_pending_stack_adjust ()
9809 if (inhibit_defer_pop
== 0)
9811 if (pending_stack_adjust
!= 0)
9812 adjust_stack (GEN_INT (pending_stack_adjust
));
9813 pending_stack_adjust
= 0;
9817 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9818 Returns the cleanups to be performed. */
9821 defer_cleanups_to (old_cleanups
)
9824 tree new_cleanups
= NULL_TREE
;
9825 tree cleanups
= cleanups_this_call
;
9826 tree last
= NULL_TREE
;
9828 while (cleanups_this_call
!= old_cleanups
)
9830 expand_eh_region_end (TREE_VALUE (cleanups_this_call
));
9831 last
= cleanups_this_call
;
9832 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
9837 /* Remove the list from the chain of cleanups. */
9838 TREE_CHAIN (last
) = NULL_TREE
;
9840 /* reverse them so that we can build them in the right order. */
9841 cleanups
= nreverse (cleanups
);
9843 /* All cleanups must be on the function_obstack. */
9844 push_obstacks_nochange ();
9845 resume_temporary_allocation ();
9850 new_cleanups
= build (COMPOUND_EXPR
, TREE_TYPE (new_cleanups
),
9851 TREE_VALUE (cleanups
), new_cleanups
);
9853 new_cleanups
= TREE_VALUE (cleanups
);
9855 cleanups
= TREE_CHAIN (cleanups
);
9861 return new_cleanups
;
9864 /* Expand all cleanups up to OLD_CLEANUPS.
9865 Needed here, and also for language-dependent calls. */
9868 expand_cleanups_to (old_cleanups
)
9871 while (cleanups_this_call
!= old_cleanups
)
9873 expand_eh_region_end (TREE_VALUE (cleanups_this_call
));
9874 expand_expr (TREE_VALUE (cleanups_this_call
), const0_rtx
, VOIDmode
, 0);
9875 cleanups_this_call
= TREE_CHAIN (cleanups_this_call
);
9879 /* Expand conditional expressions. */
9881 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9882 LABEL is an rtx of code CODE_LABEL, in this function and all the
9886 jumpifnot (exp
, label
)
9890 do_jump (exp
, label
, NULL_RTX
);
9893 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9900 do_jump (exp
, NULL_RTX
, label
);
9903 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9904 the result is zero, or IF_TRUE_LABEL if the result is one.
9905 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9906 meaning fall through in that case.
9908 do_jump always does any pending stack adjust except when it does not
9909 actually perform a jump. An example where there is no jump
9910 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9912 This function is responsible for optimizing cases such as
9913 &&, || and comparison operators in EXP. */
9916 do_jump (exp
, if_false_label
, if_true_label
)
9918 rtx if_false_label
, if_true_label
;
9920 register enum tree_code code
= TREE_CODE (exp
);
9921 /* Some cases need to create a label to jump to
9922 in order to properly fall through.
9923 These cases set DROP_THROUGH_LABEL nonzero. */
9924 rtx drop_through_label
= 0;
9929 enum machine_mode mode
;
9939 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9945 /* This is not true with #pragma weak */
9947 /* The address of something can never be zero. */
9949 emit_jump (if_true_label
);
9954 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9955 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9956 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9959 /* If we are narrowing the operand, we have to do the compare in the
9961 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9962 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9964 case NON_LVALUE_EXPR
:
9965 case REFERENCE_EXPR
:
9970 /* These cannot change zero->non-zero or vice versa. */
9971 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9975 /* This is never less insns than evaluating the PLUS_EXPR followed by
9976 a test and can be longer if the test is eliminated. */
9978 /* Reduce to minus. */
9979 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9980 TREE_OPERAND (exp
, 0),
9981 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9982 TREE_OPERAND (exp
, 1))));
9983 /* Process as MINUS. */
9987 /* Non-zero iff operands of minus differ. */
9988 comparison
= compare (build (NE_EXPR
, TREE_TYPE (exp
),
9989 TREE_OPERAND (exp
, 0),
9990 TREE_OPERAND (exp
, 1)),
9995 /* If we are AND'ing with a small constant, do this comparison in the
9996 smallest type that fits. If the machine doesn't have comparisons
9997 that small, it will be converted back to the wider comparison.
9998 This helps if we are testing the sign bit of a narrower object.
9999 combine can't do this for us because it can't know whether a
10000 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10002 if (! SLOW_BYTE_ACCESS
10003 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
10004 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
10005 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
10006 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
10007 && (type
= type_for_mode (mode
, 1)) != 0
10008 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
10009 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
10010 != CODE_FOR_nothing
))
10012 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
10017 case TRUTH_NOT_EXPR
:
10018 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10021 case TRUTH_ANDIF_EXPR
:
10024 tree cleanups
, old_cleanups
;
10026 if (if_false_label
== 0)
10027 if_false_label
= drop_through_label
= gen_label_rtx ();
10029 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
10030 seq1
= get_insns ();
10033 old_cleanups
= cleanups_this_call
;
10035 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10036 seq2
= get_insns ();
10037 cleanups
= defer_cleanups_to (old_cleanups
);
10042 rtx flag
= gen_reg_rtx (word_mode
);
10046 /* Flag cleanups as not needed. */
10047 emit_move_insn (flag
, const0_rtx
);
10050 /* Flag cleanups as needed. */
10051 emit_move_insn (flag
, const1_rtx
);
10054 /* All cleanups must be on the function_obstack. */
10055 push_obstacks_nochange ();
10056 resume_temporary_allocation ();
10058 /* convert flag, which is an rtx, into a tree. */
10059 cond
= make_node (RTL_EXPR
);
10060 TREE_TYPE (cond
) = integer_type_node
;
10061 RTL_EXPR_RTL (cond
) = flag
;
10062 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10063 cond
= save_expr (cond
);
10065 new_cleanups
= build (COND_EXPR
, void_type_node
,
10066 truthvalue_conversion (cond
),
10067 cleanups
, integer_zero_node
);
10068 new_cleanups
= fold (new_cleanups
);
10072 /* Now add in the conditionalized cleanups. */
10074 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10075 expand_eh_region_start ();
10085 case TRUTH_ORIF_EXPR
:
10088 tree cleanups
, old_cleanups
;
10090 if (if_true_label
== 0)
10091 if_true_label
= drop_through_label
= gen_label_rtx ();
10093 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
10094 seq1
= get_insns ();
10097 old_cleanups
= cleanups_this_call
;
10099 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10100 seq2
= get_insns ();
10101 cleanups
= defer_cleanups_to (old_cleanups
);
10106 rtx flag
= gen_reg_rtx (word_mode
);
10110 /* Flag cleanups as not needed. */
10111 emit_move_insn (flag
, const0_rtx
);
10114 /* Flag cleanups as needed. */
10115 emit_move_insn (flag
, const1_rtx
);
10118 /* All cleanups must be on the function_obstack. */
10119 push_obstacks_nochange ();
10120 resume_temporary_allocation ();
10122 /* convert flag, which is an rtx, into a tree. */
10123 cond
= make_node (RTL_EXPR
);
10124 TREE_TYPE (cond
) = integer_type_node
;
10125 RTL_EXPR_RTL (cond
) = flag
;
10126 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10127 cond
= save_expr (cond
);
10129 new_cleanups
= build (COND_EXPR
, void_type_node
,
10130 truthvalue_conversion (cond
),
10131 cleanups
, integer_zero_node
);
10132 new_cleanups
= fold (new_cleanups
);
10136 /* Now add in the conditionalized cleanups. */
10138 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10139 expand_eh_region_start ();
10149 case COMPOUND_EXPR
:
10150 push_temp_slots ();
10151 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
10152 preserve_temp_slots (NULL_RTX
);
10153 free_temp_slots ();
10156 do_pending_stack_adjust ();
10157 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
10160 case COMPONENT_REF
:
10161 case BIT_FIELD_REF
:
10164 int bitsize
, bitpos
, unsignedp
;
10165 enum machine_mode mode
;
10170 /* Get description of this reference. We don't actually care
10171 about the underlying object here. */
10172 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
10173 &mode
, &unsignedp
, &volatilep
);
10175 type
= type_for_size (bitsize
, unsignedp
);
10176 if (! SLOW_BYTE_ACCESS
10177 && type
!= 0 && bitsize
>= 0
10178 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
10179 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
10180 != CODE_FOR_nothing
))
10182 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
10189 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10190 if (integer_onep (TREE_OPERAND (exp
, 1))
10191 && integer_zerop (TREE_OPERAND (exp
, 2)))
10192 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10194 else if (integer_zerop (TREE_OPERAND (exp
, 1))
10195 && integer_onep (TREE_OPERAND (exp
, 2)))
10196 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10201 tree cleanups_left_side
, cleanups_right_side
, old_cleanups
;
10203 register rtx label1
= gen_label_rtx ();
10204 drop_through_label
= gen_label_rtx ();
10206 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
10208 /* We need to save the cleanups for the lhs and rhs separately.
10209 Keep track of the cleanups seen before the lhs. */
10210 old_cleanups
= cleanups_this_call
;
10212 /* Now the THEN-expression. */
10213 do_jump (TREE_OPERAND (exp
, 1),
10214 if_false_label
? if_false_label
: drop_through_label
,
10215 if_true_label
? if_true_label
: drop_through_label
);
10216 /* In case the do_jump just above never jumps. */
10217 do_pending_stack_adjust ();
10218 emit_label (label1
);
10219 seq1
= get_insns ();
10220 /* Now grab the cleanups for the lhs. */
10221 cleanups_left_side
= defer_cleanups_to (old_cleanups
);
10224 /* And keep track of where we start before the rhs. */
10225 old_cleanups
= cleanups_this_call
;
10227 /* Now the ELSE-expression. */
10228 do_jump (TREE_OPERAND (exp
, 2),
10229 if_false_label
? if_false_label
: drop_through_label
,
10230 if_true_label
? if_true_label
: drop_through_label
);
10231 seq2
= get_insns ();
10232 /* Grab the cleanups for the rhs. */
10233 cleanups_right_side
= defer_cleanups_to (old_cleanups
);
10236 if (cleanups_left_side
|| cleanups_right_side
)
10238 /* Make the cleanups for the THEN and ELSE clauses
10239 conditional based on which half is executed. */
10240 rtx flag
= gen_reg_rtx (word_mode
);
10244 /* Set the flag to 0 so that we know we executed the lhs. */
10245 emit_move_insn (flag
, const0_rtx
);
10248 /* Set the flag to 1 so that we know we executed the rhs. */
10249 emit_move_insn (flag
, const1_rtx
);
10252 /* Make sure the cleanup lives on the function_obstack. */
10253 push_obstacks_nochange ();
10254 resume_temporary_allocation ();
10256 /* Now, build up a COND_EXPR that tests the value of the
10257 flag, and then either do the cleanups for the lhs or the
10259 cond
= make_node (RTL_EXPR
);
10260 TREE_TYPE (cond
) = integer_type_node
;
10261 RTL_EXPR_RTL (cond
) = flag
;
10262 RTL_EXPR_SEQUENCE (cond
) = NULL_RTX
;
10263 cond
= save_expr (cond
);
10265 new_cleanups
= build (COND_EXPR
, void_type_node
,
10266 truthvalue_conversion (cond
),
10267 cleanups_right_side
, cleanups_left_side
);
10268 new_cleanups
= fold (new_cleanups
);
10272 /* Now add in the conditionalized cleanups. */
10274 = tree_cons (NULL_TREE
, new_cleanups
, cleanups_this_call
);
10275 expand_eh_region_start ();
10279 /* No cleanups were needed, so emit the two sequences
10289 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10291 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10292 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10293 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10294 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10297 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
10298 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10299 fold (build1 (REALPART_EXPR
,
10300 TREE_TYPE (inner_type
),
10301 TREE_OPERAND (exp
, 0))),
10302 fold (build1 (REALPART_EXPR
,
10303 TREE_TYPE (inner_type
),
10304 TREE_OPERAND (exp
, 1))))),
10305 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10306 fold (build1 (IMAGPART_EXPR
,
10307 TREE_TYPE (inner_type
),
10308 TREE_OPERAND (exp
, 0))),
10309 fold (build1 (IMAGPART_EXPR
,
10310 TREE_TYPE (inner_type
),
10311 TREE_OPERAND (exp
, 1))))))),
10312 if_false_label
, if_true_label
);
10313 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10314 && !can_compare_p (TYPE_MODE (inner_type
)))
10315 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
10317 comparison
= compare (exp
, EQ
, EQ
);
10323 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10325 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10326 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10327 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10328 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10331 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
10332 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10333 fold (build1 (REALPART_EXPR
,
10334 TREE_TYPE (inner_type
),
10335 TREE_OPERAND (exp
, 0))),
10336 fold (build1 (REALPART_EXPR
,
10337 TREE_TYPE (inner_type
),
10338 TREE_OPERAND (exp
, 1))))),
10339 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10340 fold (build1 (IMAGPART_EXPR
,
10341 TREE_TYPE (inner_type
),
10342 TREE_OPERAND (exp
, 0))),
10343 fold (build1 (IMAGPART_EXPR
,
10344 TREE_TYPE (inner_type
),
10345 TREE_OPERAND (exp
, 1))))))),
10346 if_false_label
, if_true_label
);
10347 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10348 && !can_compare_p (TYPE_MODE (inner_type
)))
10349 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
10351 comparison
= compare (exp
, NE
, NE
);
10356 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10358 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10359 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
10361 comparison
= compare (exp
, LT
, LTU
);
10365 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10367 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10368 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
10370 comparison
= compare (exp
, LE
, LEU
);
10374 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10376 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10377 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
10379 comparison
= compare (exp
, GT
, GTU
);
10383 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10385 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10386 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
10388 comparison
= compare (exp
, GE
, GEU
);
10393 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
10395 /* This is not needed any more and causes poor code since it causes
10396 comparisons and tests from non-SI objects to have different code
10398 /* Copy to register to avoid generating bad insns by cse
10399 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10400 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
10401 temp
= copy_to_reg (temp
);
10403 do_pending_stack_adjust ();
10404 if (GET_CODE (temp
) == CONST_INT
)
10405 comparison
= (temp
== const0_rtx
? const0_rtx
: const_true_rtx
);
10406 else if (GET_CODE (temp
) == LABEL_REF
)
10407 comparison
= const_true_rtx
;
10408 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
10409 && !can_compare_p (GET_MODE (temp
)))
10410 /* Note swapping the labels gives us not-equal. */
10411 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
10412 else if (GET_MODE (temp
) != VOIDmode
)
10413 comparison
= compare_from_rtx (temp
, CONST0_RTX (GET_MODE (temp
)),
10414 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10415 GET_MODE (temp
), NULL_RTX
, 0);
10420 /* Do any postincrements in the expression that was tested. */
10423 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10424 straight into a conditional jump instruction as the jump condition.
10425 Otherwise, all the work has been done already. */
10427 if (comparison
== const_true_rtx
)
10430 emit_jump (if_true_label
);
10432 else if (comparison
== const0_rtx
)
10434 if (if_false_label
)
10435 emit_jump (if_false_label
);
10437 else if (comparison
)
10438 do_jump_for_compare (comparison
, if_false_label
, if_true_label
);
10440 if (drop_through_label
)
10442 /* If do_jump produces code that might be jumped around,
10443 do any stack adjusts from that code, before the place
10444 where control merges in. */
10445 do_pending_stack_adjust ();
10446 emit_label (drop_through_label
);
10450 /* Given a comparison expression EXP for values too wide to be compared
10451 with one insn, test the comparison and jump to the appropriate label.
10452 The code of EXP is ignored; we always test GT if SWAP is 0,
10453 and LT if SWAP is 1. */
10456 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
10459 rtx if_false_label
, if_true_label
;
10461 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
10462 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
10463 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10464 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10465 rtx drop_through_label
= 0;
10466 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10469 if (! if_true_label
|| ! if_false_label
)
10470 drop_through_label
= gen_label_rtx ();
10471 if (! if_true_label
)
10472 if_true_label
= drop_through_label
;
10473 if (! if_false_label
)
10474 if_false_label
= drop_through_label
;
10476 /* Compare a word at a time, high order first. */
10477 for (i
= 0; i
< nwords
; i
++)
10480 rtx op0_word
, op1_word
;
10482 if (WORDS_BIG_ENDIAN
)
10484 op0_word
= operand_subword_force (op0
, i
, mode
);
10485 op1_word
= operand_subword_force (op1
, i
, mode
);
10489 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10490 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10493 /* All but high-order word must be compared as unsigned. */
10494 comp
= compare_from_rtx (op0_word
, op1_word
,
10495 (unsignedp
|| i
> 0) ? GTU
: GT
,
10496 unsignedp
, word_mode
, NULL_RTX
, 0);
10497 if (comp
== const_true_rtx
)
10498 emit_jump (if_true_label
);
10499 else if (comp
!= const0_rtx
)
10500 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10502 /* Consider lower words only if these are equal. */
10503 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
10505 if (comp
== const_true_rtx
)
10506 emit_jump (if_false_label
);
10507 else if (comp
!= const0_rtx
)
10508 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
10511 if (if_false_label
)
10512 emit_jump (if_false_label
);
10513 if (drop_through_label
)
10514 emit_label (drop_through_label
);
10517 /* Compare OP0 with OP1, word at a time, in mode MODE.
10518 UNSIGNEDP says to do unsigned comparison.
10519 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10522 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
10523 enum machine_mode mode
;
10526 rtx if_false_label
, if_true_label
;
10528 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10529 rtx drop_through_label
= 0;
10532 if (! if_true_label
|| ! if_false_label
)
10533 drop_through_label
= gen_label_rtx ();
10534 if (! if_true_label
)
10535 if_true_label
= drop_through_label
;
10536 if (! if_false_label
)
10537 if_false_label
= drop_through_label
;
10539 /* Compare a word at a time, high order first. */
10540 for (i
= 0; i
< nwords
; i
++)
10543 rtx op0_word
, op1_word
;
10545 if (WORDS_BIG_ENDIAN
)
10547 op0_word
= operand_subword_force (op0
, i
, mode
);
10548 op1_word
= operand_subword_force (op1
, i
, mode
);
10552 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10553 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10556 /* All but high-order word must be compared as unsigned. */
10557 comp
= compare_from_rtx (op0_word
, op1_word
,
10558 (unsignedp
|| i
> 0) ? GTU
: GT
,
10559 unsignedp
, word_mode
, NULL_RTX
, 0);
10560 if (comp
== const_true_rtx
)
10561 emit_jump (if_true_label
);
10562 else if (comp
!= const0_rtx
)
10563 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10565 /* Consider lower words only if these are equal. */
10566 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
10568 if (comp
== const_true_rtx
)
10569 emit_jump (if_false_label
);
10570 else if (comp
!= const0_rtx
)
10571 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
10574 if (if_false_label
)
10575 emit_jump (if_false_label
);
10576 if (drop_through_label
)
10577 emit_label (drop_through_label
);
10580 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10581 with one insn, test the comparison and jump to the appropriate label. */
10584 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
10586 rtx if_false_label
, if_true_label
;
10588 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10589 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10590 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10591 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10593 rtx drop_through_label
= 0;
10595 if (! if_false_label
)
10596 drop_through_label
= if_false_label
= gen_label_rtx ();
10598 for (i
= 0; i
< nwords
; i
++)
10600 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
, mode
),
10601 operand_subword_force (op1
, i
, mode
),
10602 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10603 word_mode
, NULL_RTX
, 0);
10604 if (comp
== const_true_rtx
)
10605 emit_jump (if_false_label
);
10606 else if (comp
!= const0_rtx
)
10607 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
10611 emit_jump (if_true_label
);
10612 if (drop_through_label
)
10613 emit_label (drop_through_label
);
10616 /* Jump according to whether OP0 is 0.
10617 We assume that OP0 has an integer mode that is too wide
10618 for the available compare insns. */
10621 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
10623 rtx if_false_label
, if_true_label
;
10625 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
10627 rtx drop_through_label
= 0;
10629 if (! if_false_label
)
10630 drop_through_label
= if_false_label
= gen_label_rtx ();
10632 for (i
= 0; i
< nwords
; i
++)
10634 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
,
10636 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0);
10637 if (comp
== const_true_rtx
)
10638 emit_jump (if_false_label
);
10639 else if (comp
!= const0_rtx
)
10640 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
10644 emit_jump (if_true_label
);
10645 if (drop_through_label
)
10646 emit_label (drop_through_label
);
10649 /* Given a comparison expression in rtl form, output conditional branches to
10650 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10653 do_jump_for_compare (comparison
, if_false_label
, if_true_label
)
10654 rtx comparison
, if_false_label
, if_true_label
;
10658 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10659 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_true_label
));
10663 if (if_false_label
)
10664 emit_jump (if_false_label
);
10666 else if (if_false_label
)
10669 rtx prev
= get_last_insn ();
10672 /* Output the branch with the opposite condition. Then try to invert
10673 what is generated. If more than one insn is a branch, or if the
10674 branch is not the last insn written, abort. If we can't invert
10675 the branch, emit make a true label, redirect this jump to that,
10676 emit a jump to the false label and define the true label. */
10678 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10679 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)])(if_false_label
));
10683 /* Here we get the first insn that was just emitted. It used to be the
10684 case that, on some machines, emitting the branch would discard
10685 the previous compare insn and emit a replacement. This isn't
10686 done anymore, but abort if we see that PREV is deleted. */
10689 insn
= get_insns ();
10690 else if (INSN_DELETED_P (prev
))
10693 insn
= NEXT_INSN (prev
);
10695 for (; insn
; insn
= NEXT_INSN (insn
))
10696 if (GET_CODE (insn
) == JUMP_INSN
)
10703 if (branch
!= get_last_insn ())
10706 JUMP_LABEL (branch
) = if_false_label
;
10707 if (! invert_jump (branch
, if_false_label
))
10709 if_true_label
= gen_label_rtx ();
10710 redirect_jump (branch
, if_true_label
);
10711 emit_jump (if_false_label
);
10712 emit_label (if_true_label
);
10717 /* Generate code for a comparison expression EXP
10718 (including code to compute the values to be compared)
10719 and set (CC0) according to the result.
10720 SIGNED_CODE should be the rtx operation for this comparison for
10721 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10723 We force a stack adjustment unless there are currently
10724 things pushed on the stack that aren't yet used. */
10727 compare (exp
, signed_code
, unsigned_code
)
10729 enum rtx_code signed_code
, unsigned_code
;
10732 = expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10734 = expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10735 register tree type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10736 register enum machine_mode mode
= TYPE_MODE (type
);
10737 int unsignedp
= TREE_UNSIGNED (type
);
10738 enum rtx_code code
= unsignedp
? unsigned_code
: signed_code
;
10740 #ifdef HAVE_canonicalize_funcptr_for_compare
10741 /* If function pointers need to be "canonicalized" before they can
10742 be reliably compared, then canonicalize them. */
10743 if (HAVE_canonicalize_funcptr_for_compare
10744 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10745 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10748 rtx new_op0
= gen_reg_rtx (mode
);
10750 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
10754 if (HAVE_canonicalize_funcptr_for_compare
10755 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10756 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10759 rtx new_op1
= gen_reg_rtx (mode
);
10761 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
10766 return compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
,
10768 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
10769 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
10772 /* Like compare but expects the values to compare as two rtx's.
10773 The decision as to signed or unsigned comparison must be made by the caller.
10775 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10778 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10779 size of MODE should be used. */
10782 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
10783 register rtx op0
, op1
;
10784 enum rtx_code code
;
10786 enum machine_mode mode
;
10792 /* If one operand is constant, make it the second one. Only do this
10793 if the other operand is not constant as well. */
10795 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
10796 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
10801 code
= swap_condition (code
);
10804 if (flag_force_mem
)
10806 op0
= force_not_mem (op0
);
10807 op1
= force_not_mem (op1
);
10810 do_pending_stack_adjust ();
10812 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
10813 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
10817 /* There's no need to do this now that combine.c can eliminate lots of
10818 sign extensions. This can be less efficient in certain cases on other
10821 /* If this is a signed equality comparison, we can do it as an
10822 unsigned comparison since zero-extension is cheaper than sign
10823 extension and comparisons with zero are done as unsigned. This is
10824 the case even on machines that can do fast sign extension, since
10825 zero-extension is easier to combine with other operations than
10826 sign-extension is. If we are comparing against a constant, we must
10827 convert it to what it would look like unsigned. */
10828 if ((code
== EQ
|| code
== NE
) && ! unsignedp
10829 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
10831 if (GET_CODE (op1
) == CONST_INT
10832 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
10833 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
10838 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
10840 return gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
10843 /* Generate code to calculate EXP using a store-flag instruction
10844 and return an rtx for the result. EXP is either a comparison
10845 or a TRUTH_NOT_EXPR whose operand is a comparison.
10847 If TARGET is nonzero, store the result there if convenient.
10849 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10852 Return zero if there is no suitable set-flag instruction
10853 available on this machine.
10855 Once expand_expr has been called on the arguments of the comparison,
10856 we are committed to doing the store flag, since it is not safe to
10857 re-evaluate the expression. We emit the store-flag insn by calling
10858 emit_store_flag, but only expand the arguments if we have a reason
10859 to believe that emit_store_flag will be successful. If we think that
10860 it will, but it isn't, we have to simulate the store-flag with a
10861 set/jump/set sequence. */
10864 do_store_flag (exp
, target
, mode
, only_cheap
)
10867 enum machine_mode mode
;
10870 enum rtx_code code
;
10871 tree arg0
, arg1
, type
;
10873 enum machine_mode operand_mode
;
10877 enum insn_code icode
;
10878 rtx subtarget
= target
;
10879 rtx result
, label
, pattern
, jump_pat
;
10881 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10882 result at the end. We can't simply invert the test since it would
10883 have already been inverted if it were valid. This case occurs for
10884 some floating-point comparisons. */
10886 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
10887 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
10889 arg0
= TREE_OPERAND (exp
, 0);
10890 arg1
= TREE_OPERAND (exp
, 1);
10891 type
= TREE_TYPE (arg0
);
10892 operand_mode
= TYPE_MODE (type
);
10893 unsignedp
= TREE_UNSIGNED (type
);
10895 /* We won't bother with BLKmode store-flag operations because it would mean
10896 passing a lot of information to emit_store_flag. */
10897 if (operand_mode
== BLKmode
)
10900 /* We won't bother with store-flag operations involving function pointers
10901 when function pointers must be canonicalized before comparisons. */
10902 #ifdef HAVE_canonicalize_funcptr_for_compare
10903 if (HAVE_canonicalize_funcptr_for_compare
10904 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10905 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10907 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10908 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10909 == FUNCTION_TYPE
))))
10916 /* Get the rtx comparison code to use. We know that EXP is a comparison
10917 operation of some type. Some comparisons against 1 and -1 can be
10918 converted to comparisons with zero. Do so here so that the tests
10919 below will be aware that we have a comparison with zero. These
10920 tests will not catch constants in the first operand, but constants
10921 are rarely passed as the first operand. */
10923 switch (TREE_CODE (exp
))
10932 if (integer_onep (arg1
))
10933 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
10935 code
= unsignedp
? LTU
: LT
;
10938 if (! unsignedp
&& integer_all_onesp (arg1
))
10939 arg1
= integer_zero_node
, code
= LT
;
10941 code
= unsignedp
? LEU
: LE
;
10944 if (! unsignedp
&& integer_all_onesp (arg1
))
10945 arg1
= integer_zero_node
, code
= GE
;
10947 code
= unsignedp
? GTU
: GT
;
10950 if (integer_onep (arg1
))
10951 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
10953 code
= unsignedp
? GEU
: GE
;
10959 /* Put a constant second. */
10960 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
10962 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
10963 code
= swap_condition (code
);
10966 /* If this is an equality or inequality test of a single bit, we can
10967 do this by shifting the bit being tested to the low-order bit and
10968 masking the result with the constant 1. If the condition was EQ,
10969 we xor it with 1. This does not require an scc insn and is faster
10970 than an scc insn even if we have it. */
10972 if ((code
== NE
|| code
== EQ
)
10973 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
10974 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10975 && TYPE_PRECISION (type
) <= HOST_BITS_PER_WIDE_INT
)
10977 tree inner
= TREE_OPERAND (arg0
, 0);
10982 tem
= INTVAL (expand_expr (TREE_OPERAND (arg0
, 1),
10983 NULL_RTX
, VOIDmode
, 0));
10984 /* In this case, immed_double_const will sign extend the value to make
10985 it look the same on the host and target. We must remove the
10986 sign-extension before calling exact_log2, since exact_log2 will
10987 fail for negative values. */
10988 if (BITS_PER_WORD
< HOST_BITS_PER_WIDE_INT
10989 && BITS_PER_WORD
== GET_MODE_BITSIZE (TYPE_MODE (type
)))
10990 /* We don't use the obvious constant shift to generate the mask,
10991 because that generates compiler warnings when BITS_PER_WORD is
10992 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10993 code is unreachable in that case. */
10994 tem
= tem
& GET_MODE_MASK (word_mode
);
10995 bitnum
= exact_log2 (tem
);
10997 /* If INNER is a right shift of a constant and it plus BITNUM does
10998 not overflow, adjust BITNUM and INNER. */
11000 if (TREE_CODE (inner
) == RSHIFT_EXPR
11001 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
11002 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
11003 && (bitnum
+ TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1))
11004 < TYPE_PRECISION (type
)))
11006 bitnum
+=TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
11007 inner
= TREE_OPERAND (inner
, 0);
11010 /* If we are going to be able to omit the AND below, we must do our
11011 operations as unsigned. If we must use the AND, we have a choice.
11012 Normally unsigned is faster, but for some machines signed is. */
11013 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
11014 #ifdef LOAD_EXTEND_OP
11015 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
11021 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
11022 || GET_MODE (subtarget
) != operand_mode
11023 || ! safe_from_p (subtarget
, inner
))
11026 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
11029 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
11030 size_int (bitnum
), subtarget
, ops_unsignedp
);
11032 if (GET_MODE (op0
) != mode
)
11033 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
11035 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
11036 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
11037 ops_unsignedp
, OPTAB_LIB_WIDEN
);
11039 /* Put the AND last so it can combine with more things. */
11040 if (bitnum
!= TYPE_PRECISION (type
) - 1)
11041 op0
= expand_and (op0
, const1_rtx
, subtarget
);
11046 /* Now see if we are likely to be able to do this. Return if not. */
11047 if (! can_compare_p (operand_mode
))
11049 icode
= setcc_gen_code
[(int) code
];
11050 if (icode
== CODE_FOR_nothing
11051 || (only_cheap
&& insn_operand_mode
[(int) icode
][0] != mode
))
11053 /* We can only do this if it is one of the special cases that
11054 can be handled without an scc insn. */
11055 if ((code
== LT
&& integer_zerop (arg1
))
11056 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
11058 else if (BRANCH_COST
>= 0
11059 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
11060 && TREE_CODE (type
) != REAL_TYPE
11061 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
11062 != CODE_FOR_nothing
)
11063 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
11064 != CODE_FOR_nothing
)))
11070 preexpand_calls (exp
);
11071 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
11072 || GET_MODE (subtarget
) != operand_mode
11073 || ! safe_from_p (subtarget
, arg1
))
11076 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
11077 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
11080 target
= gen_reg_rtx (mode
);
11082 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11083 because, if the emit_store_flag does anything it will succeed and
11084 OP0 and OP1 will not be used subsequently. */
11086 result
= emit_store_flag (target
, code
,
11087 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
11088 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
11089 operand_mode
, unsignedp
, 1);
11094 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
11095 result
, 0, OPTAB_LIB_WIDEN
);
11099 /* If this failed, we have to do this with set/compare/jump/set code. */
11100 if (GET_CODE (target
) != REG
11101 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
11102 target
= gen_reg_rtx (GET_MODE (target
));
11104 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
11105 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
11106 operand_mode
, NULL_RTX
, 0);
11107 if (GET_CODE (result
) == CONST_INT
)
11108 return (((result
== const0_rtx
&& ! invert
)
11109 || (result
!= const0_rtx
&& invert
))
11110 ? const0_rtx
: const1_rtx
);
11112 label
= gen_label_rtx ();
11113 if (bcc_gen_fctn
[(int) code
] == 0)
11116 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
11117 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
11118 emit_label (label
);
11123 /* Generate a tablejump instruction (used for switch statements). */
11125 #ifdef HAVE_tablejump
11127 /* INDEX is the value being switched on, with the lowest value
11128 in the table already subtracted.
11129 MODE is its expected mode (needed if INDEX is constant).
11130 RANGE is the length of the jump table.
11131 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11133 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11134 index value is out of range. */
11137 do_tablejump (index
, mode
, range
, table_label
, default_label
)
11138 rtx index
, range
, table_label
, default_label
;
11139 enum machine_mode mode
;
11141 register rtx temp
, vector
;
11143 /* Do an unsigned comparison (in the proper mode) between the index
11144 expression and the value which represents the length of the range.
11145 Since we just finished subtracting the lower bound of the range
11146 from the index expression, this comparison allows us to simultaneously
11147 check that the original index expression value is both greater than
11148 or equal to the minimum value of the range and less than or equal to
11149 the maximum value of the range. */
11151 emit_cmp_insn (index
, range
, GTU
, NULL_RTX
, mode
, 1, 0);
11152 emit_jump_insn (gen_bgtu (default_label
));
11154 /* If index is in range, it must fit in Pmode.
11155 Convert to Pmode so we can index with it. */
11157 index
= convert_to_mode (Pmode
, index
, 1);
11159 /* Don't let a MEM slip thru, because then INDEX that comes
11160 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11161 and break_out_memory_refs will go to work on it and mess it up. */
11162 #ifdef PIC_CASE_VECTOR_ADDRESS
11163 if (flag_pic
&& GET_CODE (index
) != REG
)
11164 index
= copy_to_mode_reg (Pmode
, index
);
11167 /* If flag_force_addr were to affect this address
11168 it could interfere with the tricky assumptions made
11169 about addresses that contain label-refs,
11170 which may be valid only very near the tablejump itself. */
11171 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11172 GET_MODE_SIZE, because this indicates how large insns are. The other
11173 uses should all be Pmode, because they are addresses. This code
11174 could fail if addresses and insns are not the same size. */
11175 index
= gen_rtx (PLUS
, Pmode
,
11176 gen_rtx (MULT
, Pmode
, index
,
11177 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
11178 gen_rtx (LABEL_REF
, Pmode
, table_label
));
11179 #ifdef PIC_CASE_VECTOR_ADDRESS
11181 index
= PIC_CASE_VECTOR_ADDRESS (index
);
11184 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
11185 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
11186 vector
= gen_rtx (MEM
, CASE_VECTOR_MODE
, index
);
11187 RTX_UNCHANGING_P (vector
) = 1;
11188 convert_move (temp
, vector
, 0);
11190 emit_jump_insn (gen_tablejump (temp
, table_label
));
11192 #ifndef CASE_VECTOR_PC_RELATIVE
11193 /* If we are generating PIC code or if the table is PC-relative, the
11194 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11200 #endif /* HAVE_tablejump */
11203 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11204 to that value is on the top of the stack. The resulting type is TYPE, and
11205 the source declaration is DECL. */
11208 bc_load_memory (type
, decl
)
11211 enum bytecode_opcode opcode
;
11214 /* Bit fields are special. We only know about signed and
11215 unsigned ints, and enums. The latter are treated as
11216 signed integers. */
11218 if (DECL_BIT_FIELD (decl
))
11219 if (TREE_CODE (type
) == ENUMERAL_TYPE
11220 || TREE_CODE (type
) == INTEGER_TYPE
)
11221 opcode
= TREE_UNSIGNED (type
) ? zxloadBI
: sxloadBI
;
11225 /* See corresponding comment in bc_store_memory(). */
11226 if (TYPE_MODE (type
) == BLKmode
11227 || TYPE_MODE (type
) == VOIDmode
)
11230 opcode
= mode_to_load_map
[(int) TYPE_MODE (type
)];
11232 if (opcode
== neverneverland
)
11235 bc_emit_bytecode (opcode
);
11237 #ifdef DEBUG_PRINT_CODE
11238 fputc ('\n', stderr
);
11243 /* Store the contents of the second stack slot to the address in the
11244 top stack slot. DECL is the declaration of the destination and is used
11245 to determine whether we're dealing with a bitfield. */
11248 bc_store_memory (type
, decl
)
11251 enum bytecode_opcode opcode
;
11254 if (DECL_BIT_FIELD (decl
))
11256 if (TREE_CODE (type
) == ENUMERAL_TYPE
11257 || TREE_CODE (type
) == INTEGER_TYPE
)
11263 if (TYPE_MODE (type
) == BLKmode
)
11265 /* Copy structure. This expands to a block copy instruction, storeBLK.
11266 In addition to the arguments expected by the other store instructions,
11267 it also expects a type size (SImode) on top of the stack, which is the
11268 structure size in size units (usually bytes). The two first arguments
11269 are already on the stack; so we just put the size on level 1. For some
11270 other languages, the size may be variable, this is why we don't encode
11271 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11273 bc_expand_expr (TYPE_SIZE (type
));
11277 opcode
= mode_to_store_map
[(int) TYPE_MODE (type
)];
11279 if (opcode
== neverneverland
)
11282 bc_emit_bytecode (opcode
);
11284 #ifdef DEBUG_PRINT_CODE
11285 fputc ('\n', stderr
);
11290 /* Allocate local stack space sufficient to hold a value of the given
11291 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11292 integral power of 2. A special case is locals of type VOID, which
11293 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11294 remapped into the corresponding attribute of SI. */
11297 bc_allocate_local (size
, alignment
)
11298 int size
, alignment
;
11301 int byte_alignment
;
11306 /* Normalize size and alignment */
11308 size
= UNITS_PER_WORD
;
11310 if (alignment
< BITS_PER_UNIT
)
11311 byte_alignment
= 1 << (INT_ALIGN
- 1);
11314 byte_alignment
= alignment
/ BITS_PER_UNIT
;
11316 if (local_vars_size
& (byte_alignment
- 1))
11317 local_vars_size
+= byte_alignment
- (local_vars_size
& (byte_alignment
- 1));
11319 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11320 local_vars_size
+= size
;
11326 /* Allocate variable-sized local array. Variable-sized arrays are
11327 actually pointers to the address in memory where they are stored. */
11330 bc_allocate_variable_array (size
)
11334 const int ptralign
= (1 << (PTR_ALIGN
- 1));
11336 /* Align pointer */
11337 if (local_vars_size
& ptralign
)
11338 local_vars_size
+= ptralign
- (local_vars_size
& ptralign
);
11340 /* Note down local space needed: pointer to block; also return
11343 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11344 local_vars_size
+= POINTER_SIZE
/ BITS_PER_UNIT
;
11349 /* Push the machine address for the given external variable offset. */
11352 bc_load_externaddr (externaddr
)
11355 bc_emit_bytecode (constP
);
11356 bc_emit_code_labelref (BYTECODE_LABEL (externaddr
),
11357 BYTECODE_BC_LABEL (externaddr
)->offset
);
11359 #ifdef DEBUG_PRINT_CODE
11360 fputc ('\n', stderr
);
11365 /* Like above, but expects an IDENTIFIER. */
11368 bc_load_externaddr_id (id
, offset
)
11372 if (!IDENTIFIER_POINTER (id
))
11375 bc_emit_bytecode (constP
);
11376 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id
)), offset
);
11378 #ifdef DEBUG_PRINT_CODE
11379 fputc ('\n', stderr
);
11384 /* Push the machine address for the given local variable offset. */
11387 bc_load_localaddr (localaddr
)
11390 bc_emit_instruction (localP
, (HOST_WIDE_INT
) BYTECODE_BC_LABEL (localaddr
)->offset
);
11394 /* Push the machine address for the given parameter offset.
11395 NOTE: offset is in bits. */
11398 bc_load_parmaddr (parmaddr
)
11401 bc_emit_instruction (argP
, ((HOST_WIDE_INT
) BYTECODE_BC_LABEL (parmaddr
)->offset
11406 /* Convert a[i] into *(a + i). */
11409 bc_canonicalize_array_ref (exp
)
11412 tree type
= TREE_TYPE (exp
);
11413 tree array_adr
= build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
),
11414 TREE_OPERAND (exp
, 0));
11415 tree index
= TREE_OPERAND (exp
, 1);
11418 /* Convert the integer argument to a type the same size as a pointer
11419 so the multiply won't overflow spuriously. */
11421 if (TYPE_PRECISION (TREE_TYPE (index
)) != POINTER_SIZE
)
11422 index
= convert (type_for_size (POINTER_SIZE
, 0), index
);
11424 /* The array address isn't volatile even if the array is.
11425 (Of course this isn't terribly relevant since the bytecode
11426 translator treats nearly everything as volatile anyway.) */
11427 TREE_THIS_VOLATILE (array_adr
) = 0;
11429 return build1 (INDIRECT_REF
, type
,
11430 fold (build (PLUS_EXPR
,
11431 TYPE_POINTER_TO (type
),
11433 fold (build (MULT_EXPR
,
11434 TYPE_POINTER_TO (type
),
11436 size_in_bytes (type
))))));
11440 /* Load the address of the component referenced by the given
11441 COMPONENT_REF expression.
11443 Returns innermost lvalue. */
11446 bc_expand_component_address (exp
)
11450 enum machine_mode mode
;
11452 HOST_WIDE_INT SIval
;
11455 tem
= TREE_OPERAND (exp
, 1);
11456 mode
= DECL_MODE (tem
);
11459 /* Compute cumulative bit offset for nested component refs
11460 and array refs, and find the ultimate containing object. */
11462 for (tem
= exp
;; tem
= TREE_OPERAND (tem
, 0))
11464 if (TREE_CODE (tem
) == COMPONENT_REF
)
11465 bitpos
+= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem
, 1)));
11467 if (TREE_CODE (tem
) == ARRAY_REF
11468 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
11469 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
))) == INTEGER_CST
)
11471 bitpos
+= (TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1))
11472 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem
)))
11473 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11478 bc_expand_expr (tem
);
11481 /* For bitfields also push their offset and size */
11482 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
11483 bc_push_offset_and_size (bitpos
, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp
, 1)));
11485 if (SIval
= bitpos
/ BITS_PER_UNIT
)
11486 bc_emit_instruction (addconstPSI
, SIval
);
11488 return (TREE_OPERAND (exp
, 1));
11492 /* Emit code to push two SI constants */
11495 bc_push_offset_and_size (offset
, size
)
11496 HOST_WIDE_INT offset
, size
;
11498 bc_emit_instruction (constSI
, offset
);
11499 bc_emit_instruction (constSI
, size
);
11503 /* Emit byte code to push the address of the given lvalue expression to
11504 the stack. If it's a bit field, we also push offset and size info.
11506 Returns innermost component, which allows us to determine not only
11507 its type, but also whether it's a bitfield. */
11510 bc_expand_address (exp
)
11514 if (!exp
|| TREE_CODE (exp
) == ERROR_MARK
)
11518 switch (TREE_CODE (exp
))
11522 return (bc_expand_address (bc_canonicalize_array_ref (exp
)));
11524 case COMPONENT_REF
:
11526 return (bc_expand_component_address (exp
));
11530 bc_expand_expr (TREE_OPERAND (exp
, 0));
11532 /* For variable-sized types: retrieve pointer. Sometimes the
11533 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11534 also make sure we have an operand, just in case... */
11536 if (TREE_OPERAND (exp
, 0)
11537 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
11538 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))) != INTEGER_CST
)
11539 bc_emit_instruction (loadP
);
11541 /* If packed, also return offset and size */
11542 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 0)))
11544 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 0))),
11545 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp
, 0))));
11547 return (TREE_OPERAND (exp
, 0));
11549 case FUNCTION_DECL
:
11551 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11552 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
11557 bc_load_parmaddr (DECL_RTL (exp
));
11559 /* For variable-sized types: retrieve pointer */
11560 if (TYPE_SIZE (TREE_TYPE (exp
))
11561 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11562 bc_emit_instruction (loadP
);
11564 /* If packed, also return offset and size */
11565 if (DECL_BIT_FIELD (exp
))
11566 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11567 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
11573 bc_emit_instruction (returnP
);
11579 if (BYTECODE_LABEL (DECL_RTL (exp
)))
11580 bc_load_externaddr (DECL_RTL (exp
));
11583 if (DECL_EXTERNAL (exp
))
11584 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11585 (BYTECODE_BC_LABEL (DECL_RTL (exp
)))->offset
);
11587 bc_load_localaddr (DECL_RTL (exp
));
11589 /* For variable-sized types: retrieve pointer */
11590 if (TYPE_SIZE (TREE_TYPE (exp
))
11591 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11592 bc_emit_instruction (loadP
);
11594 /* If packed, also return offset and size */
11595 if (DECL_BIT_FIELD (exp
))
11596 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11597 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
11605 bc_emit_bytecode (constP
);
11606 r
= output_constant_def (exp
);
11607 bc_emit_code_labelref (BYTECODE_LABEL (r
), BYTECODE_BC_LABEL (r
)->offset
);
11609 #ifdef DEBUG_PRINT_CODE
11610 fputc ('\n', stderr
);
11621 /* Most lvalues don't have components. */
11626 /* Emit a type code to be used by the runtime support in handling
11627 parameter passing. The type code consists of the machine mode
11628 plus the minimal alignment shifted left 8 bits. */
11631 bc_runtime_type_code (type
)
11636 switch (TREE_CODE (type
))
11642 case ENUMERAL_TYPE
:
11646 val
= (int) TYPE_MODE (type
) | TYPE_ALIGN (type
) << 8;
11658 return build_int_2 (val
, 0);
11662 /* Generate constructor label */
11665 bc_gen_constr_label ()
11667 static int label_counter
;
11668 static char label
[20];
11670 sprintf (label
, "*LR%d", label_counter
++);
11672 return (obstack_copy0 (&permanent_obstack
, label
, strlen (label
)));
11676 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11677 expand the constructor data as static data, and push a pointer to it.
11678 The pointer is put in the pointer table and is retrieved by a constP
11679 bytecode instruction. We then loop and store each constructor member in
11680 the corresponding component. Finally, we return the original pointer on
11684 bc_expand_constructor (constr
)
11688 HOST_WIDE_INT ptroffs
;
11692 /* Literal constructors are handled as constants, whereas
11693 non-literals are evaluated and stored element by element
11694 into the data segment. */
11696 /* Allocate space in proper segment and push pointer to space on stack.
11699 l
= bc_gen_constr_label ();
11701 if (TREE_CONSTANT (constr
))
11705 bc_emit_const_labeldef (l
);
11706 bc_output_constructor (constr
, int_size_in_bytes (TREE_TYPE (constr
)));
11712 bc_emit_data_labeldef (l
);
11713 bc_output_data_constructor (constr
);
11717 /* Add reference to pointer table and recall pointer to stack;
11718 this code is common for both types of constructors: literals
11719 and non-literals. */
11721 ptroffs
= bc_define_pointer (l
);
11722 bc_emit_instruction (constP
, ptroffs
);
11724 /* This is all that has to be done if it's a literal. */
11725 if (TREE_CONSTANT (constr
))
11729 /* At this point, we have the pointer to the structure on top of the stack.
11730 Generate sequences of store_memory calls for the constructor. */
11732 /* constructor type is structure */
11733 if (TREE_CODE (TREE_TYPE (constr
)) == RECORD_TYPE
)
11737 /* If the constructor has fewer fields than the structure,
11738 clear the whole structure first. */
11740 if (list_length (CONSTRUCTOR_ELTS (constr
))
11741 != list_length (TYPE_FIELDS (TREE_TYPE (constr
))))
11743 bc_emit_instruction (duplicate
);
11744 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11745 bc_emit_instruction (clearBLK
);
11748 /* Store each element of the constructor into the corresponding
11749 field of TARGET. */
11751 for (elt
= CONSTRUCTOR_ELTS (constr
); elt
; elt
= TREE_CHAIN (elt
))
11753 register tree field
= TREE_PURPOSE (elt
);
11754 register enum machine_mode mode
;
11759 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
)) /* * DECL_SIZE_UNIT (field) */;
11760 mode
= DECL_MODE (field
);
11761 unsignedp
= TREE_UNSIGNED (field
);
11763 bitpos
= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field
));
11765 bc_store_field (elt
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11766 /* The alignment of TARGET is
11767 at least what its type requires. */
11769 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11770 int_size_in_bytes (TREE_TYPE (constr
)));
11775 /* Constructor type is array */
11776 if (TREE_CODE (TREE_TYPE (constr
)) == ARRAY_TYPE
)
11780 tree domain
= TYPE_DOMAIN (TREE_TYPE (constr
));
11781 int minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
11782 int maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
11783 tree elttype
= TREE_TYPE (TREE_TYPE (constr
));
11785 /* If the constructor has fewer fields than the structure,
11786 clear the whole structure first. */
11788 if (list_length (CONSTRUCTOR_ELTS (constr
)) < maxelt
- minelt
+ 1)
11790 bc_emit_instruction (duplicate
);
11791 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11792 bc_emit_instruction (clearBLK
);
11796 /* Store each element of the constructor into the corresponding
11797 element of TARGET, determined by counting the elements. */
11799 for (elt
= CONSTRUCTOR_ELTS (constr
), i
= 0;
11801 elt
= TREE_CHAIN (elt
), i
++)
11803 register enum machine_mode mode
;
11808 mode
= TYPE_MODE (elttype
);
11809 bitsize
= GET_MODE_BITSIZE (mode
);
11810 unsignedp
= TREE_UNSIGNED (elttype
);
11812 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
))
11813 /* * TYPE_SIZE_UNIT (elttype) */ );
11815 bc_store_field (elt
, bitsize
, bitpos
, mode
,
11816 TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11817 /* The alignment of TARGET is
11818 at least what its type requires. */
11820 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11821 int_size_in_bytes (TREE_TYPE (constr
)));
11828 /* Store the value of EXP (an expression tree) into member FIELD of
11829 structure at address on stack, which has type TYPE, mode MODE and
11830 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11833 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11834 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11837 bc_store_field (field
, bitsize
, bitpos
, mode
, exp
, type
,
11838 value_mode
, unsignedp
, align
, total_size
)
11839 int bitsize
, bitpos
;
11840 enum machine_mode mode
;
11841 tree field
, exp
, type
;
11842 enum machine_mode value_mode
;
11848 /* Expand expression and copy pointer */
11849 bc_expand_expr (exp
);
11850 bc_emit_instruction (over
);
11853 /* If the component is a bit field, we cannot use addressing to access
11854 it. Use bit-field techniques to store in it. */
11856 if (DECL_BIT_FIELD (field
))
11858 bc_store_bit_field (bitpos
, bitsize
, unsignedp
);
11862 /* Not bit field */
11864 HOST_WIDE_INT offset
= bitpos
/ BITS_PER_UNIT
;
11866 /* Advance pointer to the desired member */
11868 bc_emit_instruction (addconstPSI
, offset
);
11871 bc_store_memory (type
, field
);
11876 /* Store SI/SU in bitfield */
11879 bc_store_bit_field (offset
, size
, unsignedp
)
11880 int offset
, size
, unsignedp
;
11882 /* Push bitfield offset and size */
11883 bc_push_offset_and_size (offset
, size
);
11886 bc_emit_instruction (sstoreBI
);
11890 /* Load SI/SU from bitfield */
11893 bc_load_bit_field (offset
, size
, unsignedp
)
11894 int offset
, size
, unsignedp
;
11896 /* Push bitfield offset and size */
11897 bc_push_offset_and_size (offset
, size
);
11899 /* Load: sign-extend if signed, else zero-extend */
11900 bc_emit_instruction (unsignedp
? zxloadBI
: sxloadBI
);
11904 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11905 (adjust stack pointer upwards), negative means add that number of
11906 levels (adjust the stack pointer downwards). Only positive values
11907 normally make sense. */
11910 bc_adjust_stack (nlevels
)
11919 bc_emit_instruction (drop
);
11922 bc_emit_instruction (drop
);
11927 bc_emit_instruction (adjstackSI
, (HOST_WIDE_INT
) nlevels
);
11928 stack_depth
-= nlevels
;
11931 #if defined (VALIDATE_STACK_FOR_BC)
11932 VALIDATE_STACK_FOR_BC ();