/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
29 #include "hard-reg-set.h"
31 #include "insn-flags.h"
32 #include "insn-codes.h"
34 #include "insn-config.h"
37 #include "typeclass.h"
40 #include "bc-opcode.h"
41 #include "bc-typecd.h"
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
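/* For instance, CEIL (10, 4) == 3: three 4-byte units are needed to
   hold 10 bytes.  */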
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes:  keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn	PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue	PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx,
					 rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
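/* Each DEF_MODEMAP line in modemap.def expands, through the macro above,
   into three table assignments.  Illustratively (the opcode names here are
   hypothetical), an entry

	DEF_MODEMAP (SImode, beSI, beunSI, constSI, loadSI, storeSI)

   becomes

	mode_to_const_map[(int) SImode] = constSI;
	mode_to_load_map[(int) SImode] = loadSI;
	mode_to_store_map[(int) SImode] = storeSI;  */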
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
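/* A sketch of typical use (operand names hypothetical): to queue a
   post-increment of REG while still referring to its old value,

	old = enqueue_insn (reg, gen_move_insn (reg, plus_constant (reg, 1)));

   OLD is a QUEUED rtx; pass it through protect_from_queue (below) before
   putting it into an insn.  */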
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
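/* In outline, a caller does (names hypothetical):

	x = protect_from_queue (x, 0);
	emit_insn (gen_rtx (SET, VOIDmode, target, x));

   with no emit_queue call between the two statements; if the queue is
   flushed in between, the value fetched may be stale, as the comment
   above warns.  */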
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;
      rtx libcall;
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
1012 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
1013 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
1015 if (!((GET_CODE (from
) == MEM
1016 && ! MEM_VOLATILE_P (from
)
1017 && direct_load
[(int) to_mode
]
1018 && ! mode_dependent_address_p (XEXP (from
, 0)))
1019 || GET_CODE (from
) == REG
1020 || GET_CODE (from
) == SUBREG
))
1021 from
= force_reg (from_mode
, from
);
1022 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
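/* A worked instance of the CONST_INT adjustment above: widening a QImode
   constant whose low byte is 0x80 gives width == 8, so the mask leaves
   val == 128.  If UNSIGNEDP is clear, bit 7 (the sign bit) is set, so
   val |= (HOST_WIDE_INT) (-1) << 8 restores -128; with UNSIGNEDP nonzero
   the value stays 128.  This is why plain gen_lowpart would be wrong for
   such constants.  */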
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
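/* Thus, when move_by_pieces chose explicit predecrement
   (explicit_inc_to/from == -1), each chunk's move is preceded by an
   explicit "addr -= size" insn; with explicit postincrement (== +1) the
   move is followed by "addr += size".  Otherwise the constant
   DATA->offset does the addressing and no extra insns are emitted.  */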
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment it has, measured in bytes.  */

void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, const0_rtx);
}
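/* As with emit_block_move, the fall-back order above is: clear_by_pieces
   for small constant sizes, a clrstrM pattern if the machine description
   provides one, and finally the memset/bzero library call; a non-BLKmode
   object is simply stored zero directly.  */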
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
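/* For example (operand names hypothetical),

	emit_move_insn (target, GEN_INT (42));

   is valid even though the CONST_INT has VOIDmode; the mode is taken
   from TARGET, per the commentary above.  */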
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      if (stack)
	{
	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
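/* A worked example of the loop above, assuming a 32-bit target
   (UNITS_PER_WORD == 4): moving an 8-byte DImode value takes
   (8 + 4 - 1) / 4 == 2 iterations, each moving one word-sized subword
   with emit_move_insn; a 6-byte mode likewise rounds up to 2 subword
   moves.  */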
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
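/* Example, assuming STACK_GROWS_DOWNWARD: push_block (GEN_INT (16), 4, 1)
   adjusts the stack by 20 bytes and returns an address 4 bytes above the
   new virtual_outgoing_args_rtx, so the padding (BELOW nonzero) sits at
   the lower addresses and the 16-byte block begins just above it.  */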
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
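/* Example of the PARTIAL rounding rule above, assuming 32-bit words and
   PARM_BOUNDARY == 64: with PARTIAL == 3, three words of X are loaded
   into REG, REG+1 and REG+2, but the pushed stack space shrinks by only
   2 words, since 3 words rounds *down* to the 2-word (64-bit) boundary;
   the odd word's slot is still allocated, merely left unstored.  */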
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif
  /* Decide where to pad the argument:  `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);
  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif
#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
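      /* Example of the PUSH_ROUNDING tests above: on a machine whose
	 PUSH_ROUNDING rounds every push up to a multiple of 4 bytes, a
	 6-byte BLKmode argument fails PUSH_ROUNDING (6) == 6, so it is
	 not pushed piecemeal (that would deposit 8 bytes); it falls
	 through to the block-copy path below, which adjusts the stack
	 once and then copies.  */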
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr,
							   args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
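	  /* Example: gen_movstrqi is tried first because its size operand
	     is a QImode immediate, so it only applies when INTVAL (size)
	     fits in QImode's positive range, i.e. below
	     1 << (GET_MODE_BITSIZE (QImode) - 1), which is 128 when QImode
	     is 8 bits; larger blocks fall through to movstrhi, movstrsi
	     and movstrdi in turn, widest last.  */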
#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
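  /* Example of the loop bounds above, assuming 32-bit words, PARM_BOUNDARY
     == BITS_PER_WORD, REG_PARM_STACK_SPACE not defined, and a preallocated
     argument block: for a 4-word scalar with PARTIAL == 1, offset == 0 and
     not_stack == 1, so words 1 through 3 are pushed at args_offset + 0, 4
     and 8 (skip == 0), while word 0 is loaded into REG at the end of
     emit_push_insn via move_block_to_reg.  */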
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr,
					     INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  if (output_bytecode)
    {
      tree dest_innermost;

      bc_expand_expr (from);
      bc_emit_instruction (duplicate);

      dest_innermost = bc_expand_address (to);

      /* Can't deduce from TYPE that we're dealing with a bitfield, so
	 take care of it here.  */

      bc_store_memory (TREE_TYPE (to), dest_innermost);
      return NULL;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
	      || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				 &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					    force_reg (ptr_mode, offset_rtx)));
	  /* If we have a variable offset, the known alignment
	     is only that of the innermost structure containing the field.
	     (Actually, we could sometimes do better by using the
	     align of an element of the innermost array, but no need.)  */
	  if (TREE_CODE (to) == COMPONENT_REF
	      || TREE_CODE (to) == BIT_FIELD_REF)
	    alignment
	      = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
	}

      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

      if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  */
      if (! want_value)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && temp != target
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp),
						    0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, ptr_mode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
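/* A worked example of the 3/4 test above: a constructor with 8 elements
   of which 6 are (mostly) zero satisfies 4 * 6 >= 3 * 8, so the whole
   object is cleared first and only the nonzero elements are stored; with
   only 5 zero elements, 20 < 24, and every element is stored
   individually instead.  */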
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && (bitpos % BITS_PER_UNIT) == 0)
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type));
}
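/* Example: a nested CONSTRUCTOR for a field at bitpos 64 is byte-aligned
   (64 % BITS_PER_UNIT == 0), so the target address is advanced by 8 bytes
   and store_constructor recurses; an element at bitpos 3 is not
   byte-addressable and goes through store_field's bit-field path.  */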
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, const0_rtx);
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
		!= list_length (TYPE_FIELDS (type)))
	       || mostly_zeros_p (exp))
	{
	  clear_storage (target, expr_size (exp),
			 TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, exp);

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					   force_reg (ptr_mode, offset_rtx)));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
					 XEXP (to_rtx, 0));
	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos,
				   mode, TREE_VALUE (elt), type, cleared);
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear = 0;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt), i++)
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  if (4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  clear_storage (target, expr_size (exp),
			 TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      tree position;

	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST)
		{
		  HOST_WIDE_INT lo = TREE_INT_CST_LOW (lo_index);
		  HOST_WIDE_INT hi = TREE_INT_CST_LOW (hi_index);
		  HOST_WIDE_INT count = hi - lo + 1;

		  /* If the range is constant and "small", unroll the loop.
		     We must also use store_field if the target is not MEM.  */
		  if (GET_CODE (target) != MEM
		      || count <= 2
		      || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			  && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			     <= 40 * 8))
		    {
		      lo -= minelt;  hi -= minelt;
		      for (; lo <= hi; lo++)
			{
			  bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
			  store_constructor_field (target, bitsize, bitpos,
						   mode, value, type, cleared);
			}
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx (USE, GET_MODE (target), target));
		}
	    }
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      rtx xtarget = XEXP (target, 0);
      int set_word_size = TYPE_ALIGN (type);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the set (using bzero/memset), and then
	 set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;  /* In bytes from beginning of set.  */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;
		      /* The assumption here is that it is safe to use XEXP if
			 the set is multi-word, but not if it's single-word.  */
		      if (GET_CODE (target) == MEM)
			{
			  to_rtx = plus_constant (XEXP (target, 0), offset);
			  to_rtx = change_address (target, mode, to_rtx);
			}
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }
		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}
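      /* Example of the word-packing loop above, assuming set_word_size
	 == 32 and !BYTES_BIG_ENDIAN: set bit 5 becomes 1 << 5 in the
	 first word, while set bit 37 lands as 1 << 5 in the word stored
	 at byte offset 4; on a BYTES_BIG_ENDIAN machine the same bit
	 would be placed at 1 << (32 - 1 - 5) instead.  */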
      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* start of range of element or NULL */
	  tree startbit = TREE_PURPOSE (elt);
	  /* end of range of element, or element value */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* handle non-range tuple element like [ expr ]  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (GET_CODE (target) == REG)
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, 0,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    {
	      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
				 bitlength_rtx, TYPE_MODE (sizetype),
				 startbit_rtx, TYPE_MODE (sizetype),
				 endbit_rtx, TYPE_MODE (sizetype));
	    }

	  if (GET_CODE (target) == REG)
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
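/* Example of the memset shortcut above: the range [8, 23] gives
   startb == 8 and endb == 24, both multiples of BITS_PER_UNIT, so the
   bits are set by storing (24 - 8) / 8 == 2 bytes of ~0 at byte offset 1
   of the set; a range such as [3, 10] is not byte-aligned and goes
   through the __setbits library routine instead.  */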
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_IN_STRUCT_P (object) = 1;
      MEM_IN_STRUCT_P (blk_object) = 1;
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
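/* Example of the shift-based refetch avoidance above: after storing a
   signed 5-bit field whose value mode is a 32-bit tmode, count == 32 - 5
   == 27, and shifting TEMP left then (arithmetically) right by 27
   reproduces the sign-extended field value without reading the bit-field
   back; for an unsigned field the single AND with width_mask == 0x1f
   suffices.  */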
/* Return true if any object containing the innermost array is an unaligned
   packed structure field.  */

static int
get_inner_unaligned_p (exp)
     tree exp;
{
  int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
	      < needed_alignment)
	    return 1;
	}
      else if (TREE_CODE (exp) != ARRAY_REF
	       && TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      exp = TREE_OPERAND (exp, 0);
    }

  return 0;
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);

	  if (var != integer_zero_node)
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (EXACT_DIV_EXPR, var,
					     size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  index = fold (build (MULT_EXPR, index_type, index,
			       TYPE_SIZE (TREE_TYPE (exp))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize, MODE_INT, 0);
      if (mode == BLKmode)
	mode = VOIDmode;
    }

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  return exp;
}
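/* A worked example, assuming 8-bit units: for s.a[i].f, where member a
   is at bit 32 of s, each array element is 12 bytes (96 bits), and f is
   at bit 16 of an element, the loop accumulates *pbitpos == 32 + 16 ==
   48, leaves *poffset as the tree i * 12 (the variable part, in units),
   and returns s as the ultimate containing object.  */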
4157 /* Given an rtx VALUE that may contain additions and multiplications,
4158 return an equivalent value that just refers to a register or memory.
4159 This is done by generating instructions to perform the arithmetic
4160 and returning a pseudo-register containing the value.
4162 The returned value may be a REG, SUBREG, MEM or constant. */
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
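      /* Illustrative sketch (editor's note): for a value such as
	 (plus (plus (reg virtual-stack-vars) (reg 117)) (const_int 8)),
	 the code below first forms the sum of the virtual register and
	 the constant 8, so that instantiation can fold the 8 into the
	 frame offset, and only then adds (reg 117).  The register numbers
	 and offset are invented for the example.  */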
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */
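/* Illustrative sketch (editor's note): when expanding an assignment such
   as A = A + B, a caller can ask

       if (safe_from_p (DECL_RTL (a_decl), rhs_tree))
	 ... reuse A's rtx as the expansion target ...

   and must fall back to a fresh temporary when the answer is zero.  The
   names a_decl and rhs_tree are invented for the example.  */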
static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size.  So we
	 assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode.  */
      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0)));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0));

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
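/* Illustrative sketch (editor's note): a caller computing an address
   might write

       rtx addr = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);

   and accept a result like (plus (reg 64) (const_int 12)) instead of a
   fully reduced pseudo; ordinary callers pass 0 for MODIFIER and always
   get back a REG, MEM, or constant.  The register number and constant
   are invented for the example.  */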
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  tree context;
  /* Maybe defer this until sure not doing bytecode?  */
  int ignore = (target == const0_rtx
		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
		     || code == COND_EXPR)
		    && TREE_CODE (type) == VOID_TYPE));

  if (output_bytecode && modifier != EXPAND_INITIALIZER)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, modifier);

      target = 0;
    }
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
					label_rtx (exp), p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
				   label_rtx (exp), forced_labels);
	temp = gen_rtx (MEM, FUNCTION_MODE,
			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
	if (function != current_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx (MEM, Pmode,
			    fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 in case the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 0, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
				      save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
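      /* Illustrative sketch (editor's note): for a tree like
	 (PLUS_EXPR (SAVE_EXPR a) (SAVE_EXPR a)), the first expansion of
	 the SAVE_EXPR stores `a' into a temporary and records that rtx in
	 SAVE_EXPR_RTL; the second expansion returns the same rtx, so `a'
	 is evaluated exactly once.  */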
    case PLACEHOLDER_EXPR:
      /* If there is an object on the head of the placeholder list,
	 see if some object in its references is of type TYPE.  For
	 further information, see tree.def.  */
      if (placeholder_list)
	{
	  tree object;
	  tree old_list = placeholder_list;

	  for (object = TREE_PURPOSE (placeholder_list);
	       (TYPE_MAIN_VARIANT (TREE_TYPE (object))
		!= TYPE_MAIN_VARIANT (type))
	       && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
		   || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
		   || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
		   || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
	       object = TREE_OPERAND (object, 0))
	    ;

	  if (object != 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
		  == TYPE_MAIN_VARIANT (type)))
	    {
	      /* Expand this object skipping the list entries before
		 it was found in case it is also a PLACEHOLDER_EXPR.
		 In that case, we want to translate it using subsequent
		 entries.  */
	      placeholder_list = TREE_CHAIN (placeholder_list);
	      temp = expand_expr (object, original_target, tmode, modifier);
	      placeholder_list = old_list;
	      return temp;
	    }
	}

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
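      /* Illustrative sketch (editor's note): this pair serves languages
	 with self-referential type sizes (Ada, for instance).  A size tree
	 such as (PLUS_EXPR (PLACEHOLDER_EXPR) ...) is expanded inside
	 (WITH_RECORD_EXPR size record), so the placeholder is resolved to
	 the enclosing record object pushed on placeholder_list above.  */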
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	abort ();
      emit_insns (RTL_EXPR_SEQUENCE (exp));
      RTL_EXPR_SEQUENCE (exp) = const0_rtx;
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (move_by_pieces_ninsns
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT)
			    > MOVE_RATIO)
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  if (target == 0 || ! safe_from_p (target, exp))
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = change_address (target, GET_MODE (target),
					 XEXP (target, 0));
	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, 0);
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;

	/* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
	   for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
	   This code has the same general effect as simply doing
	   expand_expr on the save expr, except that the expression PTR
	   is computed for use as a memory address.  This means different
	   code, suitable for indexing, may be generated.  */
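	/* Illustrative sketch (editor's note): for C source like

	       *p += 1;

	   the front end wraps `p' in a SAVE_EXPR so the address is
	   computed once; the branch below expands that address with
	   EXPAND_SUM and caches the resulting address rtx in
	   SAVE_EXPR_RTL for the second use.  */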
	if (TREE_CODE (exp1) == SAVE_EXPR
	    && SAVE_EXPR_RTL (exp1) == 0
	    && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
	  {
	    temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
				VOIDmode, EXPAND_SUM);
	    op0 = memory_address (mode, temp);
	    op0 = copy_all_regs (op0);
	    SAVE_EXPR_RTL (exp1) = op0;
	  }
	else
	  {
	    op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	    op0 = memory_address (mode, op0);
	  }

	temp = gen_rtx (MEM, mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	    || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_IN_STRUCT_P (temp) = 1;

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	int i;

	if (TREE_CODE (low_bound) != INTEGER_CST
	    && contains_placeholder_p (low_bound))
	  low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	if ((TREE_CODE (index) != INTEGER_CST
	     || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	    && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
	  {
	    /* Nonconstant array index or nonconstant element size, and
	       not an array in an unaligned (packed) structure field.
	       Generate the tree for *(&array+index) and expand that,
	       except do it in a language-independent way
	       and don't complain about non-lvalue arrays.
	       `mark_addressable' should already have been called
	       for any array for which this case will be reached.  */

	    /* Don't forget the const or volatile flag from the array
	       element.  */
	    tree variant_type = build_type_variant (type,
						    TREE_READONLY (exp),
						    TREE_THIS_VOLATILE (exp));
	    tree array_adr = build1 (ADDR_EXPR,
				     build_pointer_type (variant_type), array);
	    tree elt;
	    tree size = size_in_bytes (type);

	    /* Convert the integer argument to a type the same size as sizetype
	       so the multiply won't overflow spuriously.  */
	    if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);

	    if (TREE_CODE (size) != INTEGER_CST
		&& contains_placeholder_p (size))
	      size = build (WITH_RECORD_EXPR, sizetype, size, exp);

	    /* Don't think the address has side effects
	       just because the array does.
	       (In some cases the address might have side effects,
	       and we fail to record that fact here.  However, it should not
	       matter, since expand_expr should not care.)  */
	    TREE_SIDE_EFFECTS (array_adr) = 0;

	    elt = build1
	      (INDIRECT_REF, type,
	       fold (build (PLUS_EXPR,
			    TYPE_POINTER_TO (variant_type),
			    array_adr,
			    convert (TYPE_POINTER_TO (variant_type),
				     fold (build (MULT_EXPR, TREE_TYPE (index),
						  index,
						  convert (TREE_TYPE (index),
							   size)))))));

	    /* Volatility, etc., of new expression is same as old
	       expression.  */
	    TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
	    TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
	    TREE_READONLY (elt) = TREE_READONLY (exp);

	    return expand_expr (elt, target, tmode, modifier);
	  }
	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && i < TREE_STRING_LENGTH (init))
		  return GEN_INT (TREE_STRING_POINTER (init)[i]);
	      }
	  }
      }
      /* Treat array-ref with constant index as a component-ref.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
	      return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
	}
      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   modifier == EXPAND_INITIALIZER ? modifier : 0);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();
	    op0 = change_address (op0, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
					   force_reg (ptr_mode, offset_rtx)));
	    /* If we have a variable offset, the known alignment
	       is only that of the innermost structure containing the field.
	       (Actually, we could sometimes do better by using the
	       size of an element of the innermost array, but no need.)  */
	    if (TREE_CODE (exp) == COMPONENT_REF
		|| TREE_CODE (exp) == BIT_FIELD_REF)
	      alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
			   / BITS_PER_UNIT);
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_SUM
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1])
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode)
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      abort ();

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));
	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_IN_STRUCT_P (op0) = 1;
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
						    (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_IN_STRUCT_P (op0) = 1;
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
	  return op0;
	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	convert_move (target, op0, unsignedp);
	return target;
      }
    case OFFSET_REF:
      {
	tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
	tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
	op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
	temp = gen_rtx (MEM, mode, memory_address (mode, op0));
	MEM_IN_STRUCT_P (temp) = 1;
	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
	 a location is accessed through a pointer to const does not mean
	 that the value there can never change.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
	return temp;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
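	/* Worked example (editor's note), following the algorithm in the
	   comment above with 8-bit units: for set_low = 3 and index = 10,
	   rlo = 3 - (3 % 8) = 0, so the code fetches unit (10 - 0) / 8 = 1
	   of the set, and tests bit 10 % 8 = 2 of it with mask 1 << 2.
	   The numbers are invented purely for illustration.  */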
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	preexpand_calls (exp);

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if ((TREE_CODE (set_high_bound) == INTEGER_CST
	     && TREE_CODE (set_low_bound) == INTEGER_CST
	     && tree_int_cst_lt (set_high_bound, set_low_bound))
	    || (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (set_low_bound) == INTEGER_CST
		&& tree_int_cst_lt (index, set_low_bound))
	    || (TREE_CODE (set_high_bound) == INTEGER_CST
		&& TREE_CODE (index) == INTEGER_CST
		&& tree_int_cst_lt (set_high_bound, index)))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_blt (op1));
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_bgt (op1));
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx (MEM, byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  cleanups_this_call
	    = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	  (*interim_eh_hook) (NULL_TREE);
	}
      return RTL_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	extern int temp_slot_level;
	tree old_cleanups = cleanups_this_call;
	int old_temp_level = target_temp_slot_level;
	push_temp_slots ();
	target_temp_slot_level = temp_slot_level;
	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	expand_cleanups_to (old_cleanups);
	preserve_temp_slots (op0);
	free_temp_slots ();
	pop_temp_slots ();
	target_temp_slot_level = old_temp_level;
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (target == 0)
	    {
	      if (mode != BLKmode)
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1,
			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
      /* We come here from MINUS_EXPR when the second operand is a constant.  */
    plus_expr:
    case PLUS_EXPR:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
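      /* Illustrative sketch (editor's note): the swap above rewrites
	 (X + C) + FP as (FP + C) + X, so that once the frame pointer is
	 eliminated to, say, sp+8, the constants C and 8 fold into one
	 displacement instead of requiring a separate add.  The offset 8
	 is invented for the example.  */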
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx (MINUS, mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  /* Deal with the case where we can't negate the constant
	     in TYPE.  */
	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    {
	      tree newtype = signed_type (type);
	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

	      if (! TREE_OVERFLOW (newneg))
		return expand_expr (convert (type,
					     build (PLUS_EXPR, newtype,
						    newop0, newneg)),
				    target, tmode, modifier);
	    }
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;
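      /* Illustrative sketch (editor's note): for unsigned x, x - 1 cannot
	 be rewritten as x + (-1) in the unsigned type, since the negation
	 would be flagged as an overflow; the code above instead redoes it
	 in the signed type, roughly (unsigned)((int)x + (-1)).  The
	 operands are invented for the example.  */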
    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return gen_rtx (PLUS, mode,
			    gen_rtx (MULT, mode, XEXP (op0, 0),
				     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
				     * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx (MULT, mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
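      /* Illustrative sketch (editor's note): with OP0 expanded to
	 (plus (reg 64) (const_int 2)) and a constant multiplier of 4, the
	 distributive-law branch above returns
	 (plus (mult (reg 64) (const_int 4)) (const_int 8)), a shape that
	 suits scaled-index addressing.  Register and constants invented.  */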
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
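      /* Illustrative sketch (editor's note): for C like

	     short a, b;
	     int prod = a * b;

	 the operands reach here as (int)a * (int)b; when the target has a
	 widening HImode-to-SImode multiply, the code below uses it
	 directly instead of extending both operands first.  */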
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0)));
	case MAX_EXPR:
	case MIN_EXPR:
	  target = original_target;
	  if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
	      || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	      || GET_MODE (target) != mode
	      || (GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER))
	    target = gen_reg_rtx (mode);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

	  /* First try to do it with a special MIN or MAX instruction.
	     If that does not win, use a conditional jump to select the proper
	     value.  */
	  this_optab = (TREE_UNSIGNED (type)
			? (code == MIN_EXPR ? umin_optab : umax_optab)
			: (code == MIN_EXPR ? smin_optab : smax_optab));

	  temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			       OPTAB_WIDEN);
	  if (temp != 0)
	    return temp;

	  /* At this point, a MEM target is no longer useful; we will get better
	     code without it.  */
	  if (GET_CODE (target) == MEM)
	    target = gen_reg_rtx (mode);

	  if (target != op0)
	    emit_move_insn (target, op0);

	  op0 = gen_label_rtx ();

	  /* If this mode is an integer too wide to compare properly,
	     compare word by word.  Rely on cse to optimize constant cases.  */
	  if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	    {
	      if (code == MAX_EXPR)
		do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					      target, op1, NULL_RTX, op0);
	      else
		do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					      op1, target, NULL_RTX, op0);
	      emit_move_insn (target, op1);
	    }
	  else
	    {
	      if (code == MAX_EXPR)
		temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
			? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
			: compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	      else
		temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
			? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
			: compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
	      if (temp == const0_rtx)
		emit_move_insn (target, op1);
	      else if (temp != const_true_rtx)
		{
		  if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		    emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
		  else
		    abort ();
		  emit_move_insn (target, op1);
		}
	    }
	  emit_label (op0);
	  return target;
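	  /* The fallback just emitted is a compare-and-branch: TARGET
	     starts as OP0, the conditional jump to the label in OP0 skips
	     the move when OP0 already wins the comparison, and otherwise
	     TARGET is overwritten with OP1.  */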
	case BIT_NOT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
	  if (temp == 0)
	    abort ();
	  return temp;

	case FFS_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  temp = expand_unop (mode, ffs_optab, op0, target, 1);
	  if (temp == 0)
	    abort ();
	  return temp;
	  /* ??? Can optimize bitwise operations with one arg constant.
	     Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	     and (a bitwise1 b) bitwise2 b (etc)
	     but that is probably not worth while.  */

	  /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	     boolean values when we want in all cases to compute both of them.  In
	     general it is fastest to do TRUTH_AND_EXPR by computing both operands
	     as actual zero-or-1 values and then bitwise anding.  In cases where
	     there cannot be any side effects, better code would be made by
	     treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	     how to recognize those cases.  */
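	  /* For example, `a && b' on 0/1 operands reaches TRUTH_AND_EXPR
	     only when both sides are to be evaluated anyway, and is then
	     computed exactly like the bitwise `a & b'.  */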
	case TRUTH_AND_EXPR:
	case BIT_AND_EXPR:
	  this_optab = and_optab;
	  goto binop;

	case TRUTH_OR_EXPR:
	case BIT_IOR_EXPR:
	  this_optab = ior_optab;
	  goto binop;

	case TRUTH_XOR_EXPR:
	case BIT_XOR_EXPR:
	  this_optab = xor_optab;
	  goto binop;
	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  preexpand_calls (exp);
	  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	    subtarget = 0;
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			       unsignedp);
	  /* Could determine the answer when only additive constants differ.  Also,
	     the addition of one can be handled by changing the condition.  */
	case LT_EXPR:
	case LE_EXPR:
	case GT_EXPR:
	case GE_EXPR:
	case EQ_EXPR:
	case NE_EXPR:
	  preexpand_calls (exp);
	  temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
	  if (temp != 0)
	    return temp;

	  /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
	  if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	      && original_target
	      && GET_CODE (original_target) == REG
	      && (GET_MODE (original_target)
		  == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    {
	      temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
				  VOIDmode, 0);

	      if (temp != original_target)
		temp = copy_to_reg (temp);

	      op1 = gen_label_rtx ();
	      emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
			     GET_MODE (temp), unsignedp, 0);
	      emit_jump_insn (gen_beq (op1));
	      emit_move_insn (temp, const1_rtx);
	      emit_label (op1);
	      return temp;
	    }
	  /* If no set-flag instruction, must generate a conditional
	     store into a temporary variable.  Drop through
	     and handle this like && and ||.  */

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  if (! ignore
	      && (target == 0 || ! safe_from_p (target, exp)
		  /* Make sure we don't have a hard reg (such as function's return
		     value) live across basic blocks, if not optimizing.  */
		  || (!optimize && GET_CODE (target) == REG
		      && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	    target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	  if (target)
	    emit_clr_insn (target);

	  op1 = gen_label_rtx ();
	  jumpifnot (exp, op1);

	  if (target)
	    emit_0_to_1_insn (target);

	  emit_label (op1);
	  return ignore ? const0_rtx : target;
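	  /* Code shape emitted above: clear TARGET, jump past the store if
	     EXP is false, else set TARGET to 1; both paths join at OP1.  */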
	case TRUTH_NOT_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
	  /* The parser is careful to generate TRUTH_NOT_EXPR
	     only with operands that are always zero or one.  */
	  temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			       target, 1, OPTAB_LIB_WIDEN);
	  if (temp == 0)
	    abort ();
	  return temp;
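	  /* Since the operand is known to be 0 or 1, `!x' is computed
	     simply as `x ^ 1'.  */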
	case COMPOUND_EXPR:
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  return expand_expr (TREE_OPERAND (exp, 1),
			      (ignore ? const0_rtx : target),
			      VOIDmode, modifier);
	case COND_EXPR:
	  {
	    rtx flag = NULL_RTX;
	    tree left_cleanups = NULL_TREE;
	    tree right_cleanups = NULL_TREE;

	    /* Used to save a pointer to the place to put the setting of
	       the flag that indicates if this side of the conditional was
	       taken.  We backpatch the code, if we find out later that we
	       have any conditional cleanups that need to be performed.  */
	    rtx dest_right_flag = NULL_RTX;
	    rtx dest_left_flag = NULL_RTX;

	    /* Note that COND_EXPRs whose type is a structure or union
	       are required to be constructed to contain assignments of
	       a temporary variable, so that we can evaluate them here
	       for side effect only.  If type is void, we must do likewise.  */

	    /* If an arm of the branch requires a cleanup,
	       only that cleanup is performed.  */

	    tree singleton = 0;
	    tree binary_op = 0, unary_op = 0;
	    tree old_cleanups = cleanups_this_call;

	    /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	       convert it to our mode, if necessary.  */
	    if (integer_onep (TREE_OPERAND (exp, 1))
		&& integer_zerop (TREE_OPERAND (exp, 2))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		if (ignore)
		  {
		    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
				 modifier);
		    return const0_rtx;
		  }

		op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
		if (GET_MODE (op0) == mode)
		  return op0;

		if (target == 0)
		  target = gen_reg_rtx (mode);
		convert_move (target, op0, unsignedp);
		return target;
	      }

	    /* If we are not to produce a result, we have no target.  Otherwise,
	       if a target was specified use it; it will not be used as an
	       intermediate target unless it is safe.  If no target, use a
	       temporary.  */

	    if (ignore)
	      temp = 0;
	    else if (original_target
		     && safe_from_p (original_target, TREE_OPERAND (exp, 0))
		     && GET_MODE (original_target) == mode
		     && ! (GET_CODE (original_target) == MEM
			   && MEM_VOLATILE_P (original_target)))
	      temp = original_target;
	    else
	      temp = assign_temp (type, 0, 0, 1);

	    /* Check for X ? A + B : A.  If we have this, we can copy
	       A to the output and conditionally add B.  Similarly for unary
	       operations.  Don't do this if X has side-effects because
	       those side effects might affect A or B and the "?" operation is
	       a sequence point in ANSI.  (We test for side effects later.)  */

	    if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
		&& operand_equal_p (TREE_OPERAND (exp, 2),
				    TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 2),
					 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	    /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
	       operation, do this as A + (X != 0).  Similarly for other simple
	       binary operators.  */
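	    /* Worked example: `x ? a + 1 : a' becomes `a + (x != 0)' when
	       X is expressible as a store-flag, so the conditional needs
	       no branch at all.  */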
	    if (temp && singleton && binary_op
		&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		&& (TREE_CODE (binary_op) == PLUS_EXPR
		    || TREE_CODE (binary_op) == MINUS_EXPR
		    || TREE_CODE (binary_op) == BIT_IOR_EXPR
		    || TREE_CODE (binary_op) == BIT_XOR_EXPR)
		&& integer_onep (TREE_OPERAND (binary_op, 1))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		rtx result;
		optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
				: TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
				: TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
				: xor_optab);

		/* If we had X ? A : A + 1, do this as A + (X == 0).

		   We have to invert the truth value here and then put it
		   back later if do_store_flag fails.  We cannot simply copy
		   TREE_OPERAND (exp, 0) to another variable and modify that
		   because invert_truthvalue can modify the tree pointed to
		   by its argument.  */
		if (singleton == TREE_OPERAND (exp, 1))
		  TREE_OPERAND (exp, 0)
		    = invert_truthvalue (TREE_OPERAND (exp, 0));

		result = do_store_flag (TREE_OPERAND (exp, 0),
					(safe_from_p (temp, singleton)
					 ? temp : NULL_RTX),
					mode, BRANCH_COST <= 1);

		if (result)
		  {
		    op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		    return expand_binop (mode, boptab, op1, result, temp,
					 unsignedp, OPTAB_LIB_WIDEN);
		  }
		else if (singleton == TREE_OPERAND (exp, 1))
		  TREE_OPERAND (exp, 0)
		    = invert_truthvalue (TREE_OPERAND (exp, 0));
	      }

	    do_pending_stack_adjust ();
	    NO_DEFER_POP;
	    op0 = gen_label_rtx ();

	    flag = gen_reg_rtx (word_mode);
	    if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	      {
		if (temp != 0)
		  {
		    /* If the target conflicts with the other operand of the
		       binary op, we can't use it.  Also, we can't use the target
		       if it is a hard register, because evaluating the condition
		       might clobber it.  */
		    if ((binary_op
			 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
			|| (GET_CODE (temp) == REG
			    && REGNO (temp) < FIRST_PSEUDO_REGISTER))
		      temp = gen_reg_rtx (mode);
		    store_expr (singleton, temp, 0);
		  }
		else
		  expand_expr (singleton,
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		dest_left_flag = get_last_insn ();
		if (singleton == TREE_OPERAND (exp, 1))
		  jumpif (TREE_OPERAND (exp, 0), op0);
		else
		  jumpifnot (TREE_OPERAND (exp, 0), op0);

		/* Allows cleanups up to here.  */
		old_cleanups = cleanups_this_call;
		if (binary_op && temp == 0)
		  /* Just touch the other operand.  */
		  expand_expr (TREE_OPERAND (binary_op, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		else if (binary_op)
		  store_expr (build (TREE_CODE (binary_op), type,
				     make_tree (type, temp),
				     TREE_OPERAND (binary_op, 1)),
			      temp, 0);
		else
		  store_expr (build1 (TREE_CODE (unary_op), type,
				      make_tree (type, temp)),
			      temp, 0);
		op1 = op0;
		dest_right_flag = get_last_insn ();
	      }
#if 0
	    /* This is now done in jump.c and is better done there because it
	       produces shorter register lifetimes.  */

	    /* Check for both possibilities either constants or variables
	       in registers (but not the same as the target!).  If so, can
	       save branches by assigning one, branching, and assigning the
	       other.  */
	    else if (temp && GET_MODE (temp) != BLKmode
		     && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
			 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
			      || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
			     && DECL_RTL (TREE_OPERAND (exp, 1))
			     && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
			     && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
		     && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
			 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
			      || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
			     && DECL_RTL (TREE_OPERAND (exp, 2))
			     && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
			     && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
	      {
		if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
		dest_left_flag = get_last_insn ();
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		/* Allows cleanups up to here.  */
		old_cleanups = cleanups_this_call;
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
		op1 = op0;
		dest_right_flag = get_last_insn ();
	      }
#endif
	    /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	       comparison operator.  If we have one of these cases, set the
	       output to A, branch on A (cse will merge these two references),
	       then set the output to FOO.  */
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 1), 0)
		     && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     && safe_from_p (temp, TREE_OPERAND (exp, 2)))
	      {
		if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
		dest_left_flag = get_last_insn ();
		jumpif (TREE_OPERAND (exp, 0), op0);

		/* Allows cleanups up to here.  */
		old_cleanups = cleanups_this_call;
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
		op1 = op0;
		dest_right_flag = get_last_insn ();
	      }
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 2), 0)
		     && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     && safe_from_p (temp, TREE_OPERAND (exp, 1)))
	      {
		if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
		dest_left_flag = get_last_insn ();
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		/* Allows cleanups up to here.  */
		old_cleanups = cleanups_this_call;
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
		op1 = op0;
		dest_right_flag = get_last_insn ();
	      }
	    else
	      {
		op1 = gen_label_rtx ();
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		/* Allows cleanups up to here.  */
		old_cleanups = cleanups_this_call;
		if (temp != 0)
		  store_expr (TREE_OPERAND (exp, 1), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		dest_left_flag = get_last_insn ();

		/* Handle conditional cleanups, if any.  */
		left_cleanups = defer_cleanups_to (old_cleanups);

		emit_queue ();
		emit_jump_insn (gen_jump (op1));
		emit_barrier ();
		emit_label (op0);
		if (temp != 0)
		  store_expr (TREE_OPERAND (exp, 2), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 2),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		dest_right_flag = get_last_insn ();

		/* Handle conditional cleanups, if any.  */
		right_cleanups = defer_cleanups_to (old_cleanups);

		emit_queue ();
	      }
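	    /* At this point DEST_LEFT_FLAG and DEST_RIGHT_FLAG record where
	       each arm's code ended.  If either arm registered a cleanup,
	       the block below backpatches a store of 1 or 0 into FLAG at
	       those points, so the deferred cleanups can test at run time
	       which arm was actually taken.  */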
	    /* Add back in, any conditional cleanups.  */
	    if (left_cleanups || right_cleanups)
	      {
		tree new_cleanups;
		tree cond;
		rtx last;

		/* Now that we know that a flag is needed, go back and add in the
		   setting of the flag.  */

		/* Do the left side flag.  */
		last = get_last_insn ();
		/* Flag left cleanups as needed.  */
		emit_move_insn (flag, const1_rtx);
		/* ??? deprecated, use sequences instead.  */
		reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);

		/* Do the right side flag.  */
		last = get_last_insn ();
		/* Flag right cleanups as needed.  */
		emit_move_insn (flag, const0_rtx);
		/* ??? deprecated, use sequences instead.  */
		reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);

		/* All cleanups must be on the function_obstack.  */
		push_obstacks_nochange ();
		resume_temporary_allocation ();

		/* convert flag, which is an rtx, into a tree.  */
		cond = make_node (RTL_EXPR);
		TREE_TYPE (cond) = integer_type_node;
		RTL_EXPR_RTL (cond) = flag;
		RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
		cond = save_expr (cond);

		if (! left_cleanups)
		  left_cleanups = integer_zero_node;
		if (! right_cleanups)
		  right_cleanups = integer_zero_node;
		new_cleanups = build (COND_EXPR, void_type_node,
				      truthvalue_conversion (cond),
				      left_cleanups, right_cleanups);
		new_cleanups = fold (new_cleanups);

		pop_obstacks ();

		/* Now add in the conditionalized cleanups.  */
		cleanups_this_call
		  = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
		(*interim_eh_hook) (NULL_TREE);
	      }

	    emit_queue ();
	    emit_label (op1);
	    OK_DEFER_POP;

	    return temp;
	  }
	case TARGET_EXPR:
	  {
	    int need_exception_region = 0;
	    /* Something needs to be initialized, but we didn't know
	       where that thing was when building the tree.  For example,
	       it could be the return value of a function, or a parameter
	       to a function which lays down in the stack, or a temporary
	       variable which must be passed by reference.

	       We guarantee that the expression will either be constructed
	       or copied into our original target.  */

	    tree slot = TREE_OPERAND (exp, 0);
	    tree exp1;

	    if (TREE_CODE (slot) != VAR_DECL)
	      abort ();

	    if (! ignore)
	      target = original_target;

	    if (target == 0)
	      {
		if (DECL_RTL (slot) != 0)
		  {
		    target = DECL_RTL (slot);
		    /* If we have already expanded the slot, don't do
		       anything else now.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  {
		    target = assign_temp (type, 2, 1, 1);
		    /* All temp slots at this level must not conflict.  */
		    preserve_temp_slots (target);
		    DECL_RTL (slot) = target;

		    /* Since SLOT is not known to the called function
		       to belong to its stack frame, we must build an explicit
		       cleanup.  This case occurs when we must build up a reference
		       to pass the reference as an argument.  In this case,
		       it is very likely that such a reference need not be
		       built here.  */

		    if (TREE_OPERAND (exp, 2) == 0)
		      TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		    if (TREE_OPERAND (exp, 2))
		      {
			cleanups_this_call = tree_cons (NULL_TREE,
							TREE_OPERAND (exp, 2),
							cleanups_this_call);
			need_exception_region = 1;
		      }
		  }
	      }
	    else
	      {
		/* This case does occur, when expanding a parameter which
		   needs to be constructed on the stack.  The target
		   is the actual stack address that we want to initialize.
		   The function we call will perform the cleanup in this case.  */

		/* If we have already assigned it space, use that space,
		   not target that we were passed in, as our target
		   parameter is only a hint.  */
		if (DECL_RTL (slot) != 0)
		  {
		    target = DECL_RTL (slot);
		    /* If we have already expanded the slot, don't do
		       anything else now.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  DECL_RTL (slot) = target;
	      }

	    exp1 = TREE_OPERAND (exp, 1);
	    /* Mark it as expanded.  */
	    TREE_OPERAND (exp, 1) = NULL_TREE;

	    temp = expand_expr (exp1, target, tmode, modifier);

	    if (need_exception_region)
	      (*interim_eh_hook) (NULL_TREE);

	    return temp;
	  }
	case INIT_EXPR:
	  {
	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);
	    tree noncopied_parts = 0;
	    tree lhs_type = TREE_TYPE (lhs);

	    temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	    if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	      noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						      TYPE_NONCOPIED_PARTS (lhs_type));
	    while (noncopied_parts != 0)
	      {
		expand_assignment (TREE_VALUE (noncopied_parts),
				   TREE_PURPOSE (noncopied_parts), 0, 0);
		noncopied_parts = TREE_CHAIN (noncopied_parts);
	      }
	    return temp;
	  }
	case MODIFY_EXPR:
	  {
	    /* If lhs is complex, expand calls in rhs before computing it.
	       That's so we don't compute a pointer and save it over a call.
	       If lhs is simple, compute it first so we can give it as a
	       target if the rhs is just a call.  This avoids an extra temp and copy
	       and that prevents a partial-subsumption which makes bad code.
	       Actually we could treat component_ref's of vars like vars.  */

	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);
	    tree noncopied_parts = 0;
	    tree lhs_type = TREE_TYPE (lhs);

	    if (TREE_CODE (lhs) != VAR_DECL
		&& TREE_CODE (lhs) != RESULT_DECL
		&& TREE_CODE (lhs) != PARM_DECL)
	      preexpand_calls (exp);

	    /* Check for |= or &= of a bitfield of size one into another bitfield
	       of size 1.  In this case, (unless we need the result of the
	       assignment) we can do this more efficiently with a
	       test followed by an assignment, if necessary.

	       ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	       things change so we do, this code should be enhanced to
	       support it.  */
	    if (ignore
		&& TREE_CODE (lhs) == COMPONENT_REF
		&& (TREE_CODE (rhs) == BIT_IOR_EXPR
		    || TREE_CODE (rhs) == BIT_AND_EXPR)
		&& TREE_OPERAND (rhs, 0) == lhs
		&& TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
		&& TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
		&& TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	      {
		rtx label = gen_label_rtx ();

		do_jump (TREE_OPERAND (rhs, 1),
			 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
			 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
		expand_assignment (lhs, convert (TREE_TYPE (rhs),
						 (TREE_CODE (rhs) == BIT_IOR_EXPR
						  ? integer_one_node
						  : integer_zero_node)),
				   0, 0);
		do_pending_stack_adjust ();
		emit_label (label);
		return const0_rtx;
	      }
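	    /* Example of the transformation above, for one-bit fields:
	       `a.x |= b.y' becomes `if (b.y) a.x = 1;' and
	       `a.x &= b.y' becomes `if (! b.y) a.x = 0;', so the common
	       case skips the store entirely.  */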
	    if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
		&& ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	      noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						      TYPE_NONCOPIED_PARTS (lhs_type));

	    temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	    while (noncopied_parts != 0)
	      {
		expand_assignment (TREE_PURPOSE (noncopied_parts),
				   TREE_VALUE (noncopied_parts), 0, 0);
		noncopied_parts = TREE_CHAIN (noncopied_parts);
	      }
	    return temp;
	  }
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  return expand_increment (exp, 0);

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* Faster to treat as pre-increment if result is not used.  */
	  return expand_increment (exp, ! ignore);
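	  /* E.g. the statement `i++;' ignores the result, so it is
	     expanded exactly like `++i;'.  */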
	case ADDR_EXPR:
	  /* If nonzero, TEMP will be set to the address of something that might
	     be a MEM corresponding to a stack slot.  */
	  temp = 0;

	  /* Are we taking the address of a nested function?  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	      && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
	    {
	      op0 = trampoline_address (TREE_OPERAND (exp, 0));
	      op0 = force_operand (op0, target);
	    }
	  /* If we are taking the address of something erroneous, just
	     return a zero.  */
	  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	    return const0_rtx;
	  else
	    {
	      /* We make sure to pass const0_rtx down if we came in with
		 ignore set, to avoid doing the cleanups twice for something.  */
	      op0 = expand_expr (TREE_OPERAND (exp, 0),
				 ignore ? const0_rtx : NULL_RTX, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? modifier : EXPAND_CONST_ADDRESS));

	      /* If we are going to ignore the result, OP0 will have been set
		 to const0_rtx, so just return it.  Don't get confused and
		 think we are taking the address of the constant.  */
	      if (ignore)
		return op0;

	      op0 = protect_from_queue (op0, 0);

	      /* We would like the object in memory.  If it is a constant,
		 we can have it be statically allocated into memory.  For
		 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
		 memory and store the value into it.  */

	      if (CONSTANT_P (op0))
		op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				       op0);
	      else if (GET_CODE (op0) == MEM)
		{
		  mark_temp_addr_taken (op0);
		  temp = XEXP (op0, 0);
		}
	      else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		       || GET_CODE (op0) == CONCAT)
		{
		  /* If this object is in a register, it must not
		     be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx memloc = assign_temp (inner_type, 1, 1, 1);

		  mark_temp_addr_taken (memloc);
		  emit_move_insn (memloc, op0);
		  op0 = memloc;
		}

	      if (GET_CODE (op0) != MEM)
		abort ();

	      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
		{
		  temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		      && mode == ptr_mode)
		    temp = convert_memory_address (ptr_mode, temp);
#endif
		  return temp;
		}

	      op0 = force_operand (XEXP (op0, 0), target);
	    }

	  if (flag_force_addr && GET_CODE (op0) != REG)
	    op0 = force_reg (Pmode, op0);

	  if (GET_CODE (op0) == REG
	      && ! REG_USERVAR_P (op0))
	    mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);

	  /* If we might have had a temp slot, add an equivalent address
	     for it.  */
	  if (temp != 0)
	    update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	      && mode == ptr_mode)
	    op0 = convert_memory_address (ptr_mode, op0);
#endif

	  return op0;
	case ENTRY_VALUE_EXPR:
	  abort ();

	/* COMPLEX type for Extended Pascal & Fortran  */
	case COMPLEX_EXPR:
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx insns;

	    /* Get the rtx code of the operands.  */
	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	    op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	    start_sequence ();

	    /* Move the real (op0) and imaginary (op1) parts to their location.  */
	    emit_move_insn (gen_realpart (mode, target), op0);
	    emit_move_insn (gen_imagpart (mode, target), op1);

	    insns = get_insns ();
	    end_sequence ();

	    /* Complex construction should appear as a single unit.  */
	    /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	    else
	      emit_insns (insns);

	    return target;
	  }
	case REALPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_realpart (mode, op0);

	case IMAGPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_imagpart (mode, op0);
	case CONJ_EXPR:
	  {
	    enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx imag_t;
	    rtx insns;

	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (mode);

	    start_sequence ();

	    /* Store the realpart and the negated imagpart to target.  */
	    emit_move_insn (gen_realpart (partmode, target),
			    gen_realpart (partmode, op0));

	    imag_t = gen_imagpart (partmode, target);
	    temp = expand_unop (partmode, neg_optab,
				gen_imagpart (partmode, op0), imag_t, 0);
	    if (temp != imag_t)
	      emit_move_insn (imag_t, temp);

	    insns = get_insns ();
	    end_sequence ();

	    /* Conjugate should appear as a single unit
	       If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	    else
	      emit_insns (insns);

	    return target;
	  }
	case ERROR_MARK:
	  op0 = CONST0_RTX (tmode);
	  if (op0 != 0)
	    return op0;
	  return const0_rtx;

	default:
	  return (*lang_expand_expr) (exp, original_target, tmode, modifier);
	}
      /* Here to do an ordinary binary operator, generating an instruction
	 from the optab already placed in `this_optab'.  */
    binop:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
    binop2:
      temp = expand_binop (mode, this_optab, op0, op1, target,
			   unsignedp, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
}
/* Emit bytecode to evaluate the given expression EXP to the stack.  */

void
bc_expand_expr (exp)
     tree exp;
{
  enum tree_code code;
  tree type;
  rtx r;
  struct binary_operator *binoptab;
  struct unary_operator *unoptab;
  struct increment_operator *incroptab;
  struct bc_label *lab, *lab1;
  enum bytecode_opcode opcode;

  code = TREE_CODE (exp);

  switch (code)
    {
    case PARM_DECL:

      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return;
	}

      bc_load_parmaddr (DECL_RTL (exp));
      bc_load_memory (TREE_TYPE (exp), exp);

      return;

    case VAR_DECL:

      if (DECL_RTL (exp) == 0)
	abort ();

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else
	bc_load_localaddr (DECL_RTL (exp));
#endif
      if (TREE_PUBLIC (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      bc_load_memory (TREE_TYPE (exp), exp);
      return;

    case INTEGER_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
      bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
						    ? SImode
						    : TYPE_MODE (TREE_TYPE (exp)))],
			   (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
      return;

    case REAL_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
#endif
      /* FIX THIS: find a better way to pass real_cst's. -bson */
      bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
			   (double) TREE_REAL_CST (exp));
      return;
    case CALL_EXPR:
      {
	/* We build a call description vector describing the type of
	   the return value and of the arguments; this call vector,
	   together with a pointer to a location for the return value
	   and the base of the argument list, is passed to the low
	   level machine dependent call subroutine, which is responsible
	   for putting the arguments wherever real functions expect
	   them, as well as getting the return value back.  */

	tree calldesc = 0, arg;
	int nargs = 0;
	rtx retval;

	/* Push the evaluated args on the evaluation stack in reverse
	   order.  Also make an entry for each arg in the calldesc
	   vector while we're at it.  */
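	/* The finished calldesc is emitted below as a constant int array
	   holding the argument count, the return value's type code and
	   size, and a type code and size for each argument; its address
	   and the return-value slot's address are pushed alongside the
	   evaluated arguments for the interpreter's `call' primitive.  */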
	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
	  {
	    nargs++;
	    bc_expand_expr (TREE_VALUE (arg));

	    calldesc = tree_cons ((tree) 0,
				  size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	    calldesc = tree_cons ((tree) 0,
				  bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	  }

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	/* Allocate a location for the return value and push its
	   address on the evaluation stack.  Also make an entry
	   at the front of the calldesc for the return value type.  */

	type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
	bc_load_localaddr (retval);

	calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
	calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);

	/* Prepend the argument count.  */
	calldesc = tree_cons ((tree) 0,
			      build_int_2 (nargs, 0),
			      calldesc);

	/* Push the address of the call description vector on the stack.  */
	calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
	TREE_TYPE (calldesc) = build_array_type (integer_type_node,
						 build_index_type (build_int_2 (nargs * 2, 0)));
	r = output_constant_def (calldesc);
	bc_load_externaddr (r);

	/* Push the address of the function to be called.  */
	bc_expand_expr (TREE_OPERAND (exp, 0));

	/* Call the function, popping its address and the calldesc vector
	   address off the evaluation stack in the process.  */
	bc_emit_instruction (call);

	/* Pop the arguments off the stack.  */
	bc_adjust_stack (nargs);

	/* Load the return value onto the stack.  */
	bc_load_localaddr (retval);
	bc_load_memory (type, TREE_OPERAND (exp, 0));
      }
      return;
    case SAVE_EXPR:

      if (!SAVE_EXPR_RTL (exp))
	{
	  /* First time around: copy to local variable */
	  SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
						   TYPE_ALIGN (TREE_TYPE (exp)));
	  bc_expand_expr (TREE_OPERAND (exp, 0));
	  bc_emit_instruction (duplicate);

	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      else
	{
	  /* Consecutive reference: use saved copy */
	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      return;
      /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
	 how are they handled instead?  */
    case LET_STMT:

      TREE_USED (exp) = 1;
      bc_expand_expr (STMT_BODY (exp));
      return;

    case NOP_EXPR:
    case CONVERT_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
      return;

    case MODIFY_EXPR:

      expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
      return;

    case ADDR_EXPR:

      bc_expand_address (TREE_OPERAND (exp, 0));
      return;

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;

    case ARRAY_REF:

      bc_expand_expr (bc_canonicalize_array_ref (exp));
      return;

    case COMPONENT_REF:

      bc_expand_component_address (exp);

      /* If we have a bitfield, generate a proper load */
      bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
      return;

    case COMPOUND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_emit_instruction (drop);
      bc_expand_expr (TREE_OPERAND (exp, 1));
      return;

    case COND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();
      bc_emit_bytecode (xjumpifnot);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
      bc_expand_expr (TREE_OPERAND (exp, 1));
      lab1 = bc_get_bytecode_label ();
      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (lab1);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_bytecode_labeldef (lab);
      bc_expand_expr (TREE_OPERAND (exp, 2));
      bc_emit_bytecode_labeldef (lab1);
      return;
    case TRUTH_ANDIF_EXPR:

      opcode = xjumpifnot;
      goto andorif;

    case TRUTH_ORIF_EXPR:

      opcode = xjumpif;
      goto andorif;

    case PLUS_EXPR:

      binoptab = optab_plus_expr;
      goto binop;

    case MINUS_EXPR:

      binoptab = optab_minus_expr;
      goto binop;

    case MULT_EXPR:

      binoptab = optab_mult_expr;
      goto binop;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:

      binoptab = optab_trunc_div_expr;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

      binoptab = optab_trunc_mod_expr;
      goto binop;

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case MAX_EXPR:
    case MIN_EXPR:
    case FFS_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      abort ();			/* FIXME */

    case RDIV_EXPR:

      binoptab = optab_rdiv_expr;
      goto binop;

    case BIT_AND_EXPR:

      binoptab = optab_bit_and_expr;
      goto binop;

    case BIT_IOR_EXPR:

      binoptab = optab_bit_ior_expr;
      goto binop;

    case BIT_XOR_EXPR:

      binoptab = optab_bit_xor_expr;
      goto binop;

    case LSHIFT_EXPR:

      binoptab = optab_lshift_expr;
      goto binop;

    case RSHIFT_EXPR:

      binoptab = optab_rshift_expr;
      goto binop;

    case TRUTH_AND_EXPR:

      binoptab = optab_truth_and_expr;
      goto binop;

    case TRUTH_OR_EXPR:

      binoptab = optab_truth_or_expr;
      goto binop;

    case LT_EXPR:

      binoptab = optab_lt_expr;
      goto binop;

    case LE_EXPR:

      binoptab = optab_le_expr;
      goto binop;

    case GE_EXPR:

      binoptab = optab_ge_expr;
      goto binop;

    case GT_EXPR:

      binoptab = optab_gt_expr;
      goto binop;

    case EQ_EXPR:

      binoptab = optab_eq_expr;
      goto binop;

    case NE_EXPR:

      binoptab = optab_ne_expr;
      goto binop;

    case NEGATE_EXPR:

      unoptab = optab_negate_expr;
      goto unop;

    case BIT_NOT_EXPR:

      unoptab = optab_bit_not_expr;
      goto unop;

    case TRUTH_NOT_EXPR:

      unoptab = optab_truth_not_expr;
      goto unop;

    case PREDECREMENT_EXPR:

      incroptab = optab_predecrement_expr;
      goto increment;

    case PREINCREMENT_EXPR:

      incroptab = optab_preincrement_expr;
      goto increment;

    case POSTDECREMENT_EXPR:

      incroptab = optab_postdecrement_expr;
      goto increment;

    case POSTINCREMENT_EXPR:

      incroptab = optab_postincrement_expr;
      goto increment;

    case CONSTRUCTOR:

      bc_expand_constructor (exp);
      return;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output.  */
	if (TREE_OPERAND (exp, 2) != 0)
	  TREE_USED (TREE_OPERAND (exp, 2)) = 1;

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	bc_expand_expr (TREE_OPERAND (exp, 1));

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
      }
      return;
    binop:

      bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
      return;

    unop:

      bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;

    andorif:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();

      bc_emit_instruction (duplicate);
      bc_emit_bytecode (opcode);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_instruction (drop);

      bc_expand_expr (TREE_OPERAND (exp, 1));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
      bc_emit_bytecode_labeldef (lab);
      return;

    increment:

      type = TREE_TYPE (TREE_OPERAND (exp, 0));

      /* Push the quantum.  */
      bc_expand_expr (TREE_OPERAND (exp, 1));

      /* Convert it to the lvalue's type.  */
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);

      /* Push the address of the lvalue */
      bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));

      /* Perform actual increment */
      bc_expand_increment (incroptab, type);
      return;
    }
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */
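/* For example, for `(int *) p + 1' the PLUS_EXPR case below caps the
   alignment by what the constant byte offset allows, while for `&i'
   with `int i' the ADDR_EXPR case recovers the full alignment of the
   declaration of I.  */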
static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
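/* Examples: c_strlen of "abcd" (offset 0) yields size_int (4); for
   "foo\0bar" with a variable offset it yields 0 (unknown), since the
   embedded null makes the length depend on where the search starts.  */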
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx (MEM, Pmode, tem);
#endif
  return tem;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
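/* E.g. a call written `__builtin_strlen (s)' satisfies
   CALLED_AS_BUILT_IN even when not optimizing, whereas plain
   `strlen (s)' is then handed to the library instead.  */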
static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
	  emit_jump_insn (gen_beq (lab1));

#ifdef TARGET_EDOM
	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }
#else
	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
#endif

	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
      /* __builtin_apply_args returns block of memory allocated on
	 the stack into which is stored the arg pointer, structure
	 value address, static chain, and all the registers that might
	 possibly be used in performing a function call.  The code is
	 moved to the start of the function so the incoming values are
	 saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}
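      /* Illustrative source-level use of this builtin family:
	   void *args = __builtin_apply_args ();
	   void *res  = __builtin_apply (fn, args, size);
	   __builtin_return (res);
	 which forwards the current arguments to FN and returns whatever
	 FN returned; SIZE is the caller's estimate of the argument bytes
	 to copy, as the comment above notes.  */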
      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }
7976 pop_topmost_sequence ();
7980 /* __builtin_args_info (N) returns word N of the arg space info
7981 for the current function. The number and meanings of words
7982 is controlled by the definition of CUMULATIVE_ARGS. */
7983 case BUILT_IN_ARGS_INFO
:
7985 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
7987 int *word_ptr
= (int *) ¤t_function_args_info
;
7988 tree type
, elts
, result
;
7990 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
7991 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7992 __FILE__
, __LINE__
);
7996 tree arg
= TREE_VALUE (arglist
);
7997 if (TREE_CODE (arg
) != INTEGER_CST
)
7998 error ("argument of `__builtin_args_info' must be constant");
8001 int wordnum
= TREE_INT_CST_LOW (arg
);
8003 if (wordnum
< 0 || wordnum
>= nwords
|| TREE_INT_CST_HIGH (arg
))
8004 error ("argument of `__builtin_args_info' out of range");
8006 return GEN_INT (word_ptr
[wordnum
]);
8010 error ("missing argument in `__builtin_args_info'");
8015 for (i
= 0; i
< nwords
; i
++)
8016 elts
= tree_cons (NULL_TREE
, build_int_2 (word_ptr
[i
], 0));
8018 type
= build_array_type (integer_type_node
,
8019 build_index_type (build_int_2 (nwords
, 0)));
8020 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, nreverse (elts
));
8021 TREE_CONSTANT (result
) = 1;
8022 TREE_STATIC (result
) = 1;
8023 result
= build (INDIRECT_REF
, build_pointer_type (type
), result
);
8024 TREE_CONSTANT (result
) = 1;
8025 return expand_expr (result
, NULL_RTX
, VOIDmode
, 0);
      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.
	       We must also strip off INDIRECT_EXPR for C++ reference
	       parameters.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR
		   || TREE_CODE (arg) == INDIRECT_REF)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
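      /* This is the expansion behind va_start: the result is the arg
	 pointer plus the offset of the first anonymous argument, i.e.
	 the address just past the last named parameter.  */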
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);

    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	{
	  tree arg = TREE_VALUE (arglist);

	  STRIP_NOPS (arg);
	  return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
		  || (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
		  ? const1_rtx : const0_rtx);
	}
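      /* Hence `__builtin_constant_p (3)' and `__builtin_constant_p ("x")'
	 yield 1 here, while any expression not already folded to a
	 constant node yields 0, even if later optimization could prove
	 it constant.  */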
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the return address saved in that frame.  */
      if (arglist == 0)
	/* Warning about missing arg was already issued.  */
	return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
	{
	  error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
	{
	  error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else
	{
	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
						hard_frame_pointer_rtx);

	  /* For __builtin_frame_address, return what we've got.  */
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    return tem;

	  if (GET_CODE (tem) != REG)
	    tem = copy_to_reg (tem);
	  return tem;
	}
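
      /* Typical user-level uses of these two builtins (sketch):

		void *ra = __builtin_return_address (0);  the caller's address
		void *fa = __builtin_frame_address (0);   this frame's address

	 A nonzero constant asks for the frame that many levels up the
	 stack, which is what expand_builtin_return_addr walks above.  */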
    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
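
      /* E.g. for the hypothetical call

		char *buf = __builtin_alloca (len);

	 the LEN expression is expanded into OP0 and the space is carved
	 out of the current frame by allocate_dynamic_stack_space; it is
	 freed automatically when the function returns.  */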
    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  tree src = TREE_VALUE (arglist);
	  tree len = c_strlen (src);

	  int align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

	  rtx result, src_rtx, char_rtx;
	  enum machine_mode insn_mode = value_mode, char_mode;
	  enum insn_code icode;

	  /* If the length is known, just return it.  */
	  if (len)
	    return expand_expr (len, target, mode, 0);

	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.  */
	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);

	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, ptr_mode,
						 EXPAND_NORMAL));
	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int)icode][2];
	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx (MEM, BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));

	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}
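
      /* For instance, strlen ("hello") has a C_STRLEN of 5, so the first
	 test above folds the whole call to the constant 5; the strlen
	 insn and the library call are needed only when the length is not
	 known at compile time.  */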
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}

      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  tree type;

	  int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_rtx, dest_mem, src_mem;

	  /* If either SRC or DEST is not a pointer type, don't do
	     this operation in-line.  */
	  if (src_align == 0 || dest_align == 0)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	      break;
	    }

	  dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
	  dest_mem = gen_rtx (MEM, BLKmode,
			      memory_address (BLKmode, dest_rtx));
	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (dest) == NOP_EXPR)
	    dest = TREE_OPERAND (dest, 0);
	  type = TREE_TYPE (TREE_TYPE (dest));
	  MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
	  src_mem = gen_rtx (MEM, BLKmode,
			     memory_address (BLKmode,
					     expand_expr (src, NULL_RTX,
							  ptr_mode,
							  EXPAND_SUM)));
	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (src) == NOP_EXPR)
	    src = TREE_OPERAND (src, 0);
	  type = TREE_TYPE (TREE_TYPE (src));
	  MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);

	  /* Copy word part most expediently.  */
	  emit_block_move (dest_mem, src_mem,
			   expand_expr (len, NULL_RTX, VOIDmode, 0),
			   MIN (src_align, dest_align));
	  return force_operand (dest_rtx, NULL_RTX);
	}
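
      /* For example, for the hypothetical call

		struct s a, b;
		memcpy (&a, &b, sizeof (struct s));

	 both pointer alignments are known, so the copy is emitted inline
	 by emit_block_move at the smaller of the two alignments instead
	 of being left as a library call.  */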
      /* These comparison functions need an instruction that returns an
	 actual index.  An ordinary compare that just sets the condition
	 codes is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len, len2;

	len = c_strlen (arg1);
	if (len)
	  len = size_binop (PLUS_EXPR, integer_one_node, len);
	len2 = c_strlen (arg2);
	if (len2)
	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

	/* If we don't have a constant length for the first, use the length
	   of the second, if we know it.  We don't require a constant for
	   this case; some cost analysis could be done if both are available
	   but neither is constant.  For now, assume they're equally cheap.

	   If both strings have constant lengths, use the smaller.  This
	   could arise if optimization results in strcpy being called with
	   two fixed strings, or if the code was machine-generated.  We should
	   add some code to the `memcmp' handler below to deal with such
	   situations, someday.  */
	if (!len || TREE_CODE (len) != INTEGER_CST)
	  {
	    if (len2)
	      len = len2;
	    else if (len == 0)
	      break;
	  }
	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
	  {
	    if (tree_int_cst_lt (len2, len))
	      len = len2;
	  }

	chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in.  */
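
      /* E.g. for strcmp (s, "abc"): LEN for S is unknown, LEN2 is 4
	 (including the terminating null), so LEN2 is used and the
	 comparison below inspects at most 4 bytes.  */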
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	rtx result;

	int arg1_align
	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	int arg2_align
	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	enum machine_mode insn_mode
	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

	/* If we don't have POINTER_TYPE, call the function.  */
	if (arg1_align == 0 || arg2_align == 0)
	  {
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	    break;
	  }

	/* Make a place to write the result of the instruction.  */
	result = target;
	if (! (result != 0
	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	  result = gen_reg_rtx (insn_mode);

	emit_insn (gen_cmpstrsi (result,
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg1, NULL_RTX,
						       ptr_mode,
						       EXPAND_NORMAL)),
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg2, NULL_RTX,
						       ptr_mode,
						       EXPAND_NORMAL)),
				 expand_expr (len, NULL_RTX, VOIDmode, 0),
				 GEN_INT (MIN (arg1_align, arg2_align))));

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	else if (target != 0)
	  {
	    convert_move (target, result, 0);
	    return target;
	  }
	else
	  return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

      /* __builtin_setjmp is passed a pointer to an array of five words
	 (not all will be used on all machines).  It operates similarly to
	 the C library function of the same name, but is more efficient.
	 Much of the code below (and for longjmp) is copied from the handling
	 of non-local gotos.

	 NOTE: This is intended for use by GNAT and will only work in
	 the method used by it.  This code will likely NOT survive to
	 the GCC 2.8.0 release.  */
    case BUILT_IN_SETJMP:
      if (arglist == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr
	    = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), subtarget,
					     VOIDmode, 0));
	  rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
	  enum machine_mode sa_mode = Pmode;
	  rtx stack_save;

	  if (target == 0 || GET_CODE (target) != REG
	      || REGNO (target) < FIRST_PSEUDO_REGISTER)
	    target = gen_reg_rtx (value_mode);

	  emit_note (NULL_PTR, NOTE_INSN_SETJMP);
	  current_function_calls_setjmp = 1;

	  /* We store the frame pointer and the address of lab1 in the buffer
	     and use the rest of it for the stack save area, which is
	     machine-dependent.  */
	  emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
			  virtual_stack_vars_rtx);
	  emit_move_insn
	    (validize_mem (gen_rtx (MEM, Pmode,
				    plus_constant (buf_addr,
						   GET_MODE_SIZE (Pmode)))),
	     gen_rtx (LABEL_REF, Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
	  if (HAVE_save_stack_nonlocal)
	    sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

	  stack_save = gen_rtx (MEM, sa_mode,
				plus_constant (buf_addr,
					       2 * GET_MODE_SIZE (Pmode)));
	  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

	  /* Set TARGET to zero and branch around the other case.  */
	  emit_move_insn (target, const0_rtx);
	  emit_jump_insn (gen_jump (lab2));
	  emit_barrier ();
	  emit_label (lab1);

	  /* Now put in the code to restore the frame pointer, and argument
	     pointer, if needed.  The code below is from expand_end_bindings
	     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
	  if (! HAVE_nonlocal_goto)
#endif
	    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    {
#ifdef ELIMINABLE_REGS
	      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
	      int i;

	      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
		if (elim_regs[i].from == ARG_POINTER_REGNUM
		    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
		  break;

	      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
		{
		  /* Now restore our arg pointer from the address at which it
		     was saved in our stack frame.
		     If there hasn't been space allocated for it yet, make
		     some now.  */
		  if (arg_pointer_save_area == 0)
		    arg_pointer_save_area
		      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
		  emit_move_insn (virtual_incoming_args_rtx,
				  copy_to_reg (arg_pointer_save_area));
		}
	    }
#endif

	  /* The result to return is in the static chain pointer.  */
	  if (GET_MODE (static_chain_rtx) == GET_MODE (target))
	    emit_move_insn (target, static_chain_rtx);
	  else
	    convert_move (target, static_chain_rtx, 0);

	  emit_label (lab2);
	  return target;
	}
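
      /* The resulting layout of the five-word buffer, as built above
	 (word 2 onward is the machine-dependent stack save area, which
	 need not use all remaining words):

		word 0:	frame pointer (virtual_stack_vars_rtx)
		word 1:	label to resume at (LAB1)
		word 2:	stack save area (SAVE_NONLOCAL)

	 __builtin_longjmp below reads the words back at the same
	 offsets.  */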
      /* __builtin_longjmp is passed a pointer to an array of five words
	 and a value to return.  It's similar to the C library longjmp
	 function but works with __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr
	    = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), NULL_RTX,
					     VOIDmode, 0));
	  rtx fp = gen_rtx (MEM, Pmode, buf_addr);
	  rtx lab = gen_rtx (MEM, Pmode,
			     plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
	  enum machine_mode sa_mode
#ifdef HAVE_save_stack_nonlocal
	    = (HAVE_save_stack_nonlocal
	       ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
	       : Pmode);
#else
	    = Pmode;
#endif
	  rtx stack = gen_rtx (MEM, sa_mode,
			       plus_constant (buf_addr,
					      2 * GET_MODE_SIZE (Pmode)));
	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), NULL_RTX,
				   VOIDmode, 0);

	  /* Pick up FP, label, and SP from the block and jump.  This code is
	     from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
	  if (HAVE_nonlocal_goto)
	    emit_insn (gen_nonlocal_goto (fp, lab, stack, value));
	  else
#endif
	    {
	      emit_move_insn (hard_frame_pointer_rtx, fp);
	      emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	      /* Put in the static chain register the return value.  */
	      emit_move_insn (static_chain_rtx, value);
	      emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	      emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	      emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	      emit_indirect_jump (copy_to_reg (lab));
	    }
	}
      break;
    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
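
/* A worked example of the size computation above, for a hypothetical
   machine with 4-byte Pmode, two 4-byte integer argument registers r0
   and r1, and one 8-byte float argument register f0:

	size = 4	incoming arg-pointer
	size = 8	structure value address (if struct_value_rtx)
	r0 at offset 8,  r1 at offset 12	(4-byte alignment)
	f0 at offset 16				(already 8-aligned)

   giving a final size of 24.  The CEIL rounding only kicks in when
   SIZE is not already a multiple of a register's alignment.  */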
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx (SET, VOIDmode, mem, reg)
			    : gen_rtx (SET, VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
	/* For reg-stack.c's stack register household.
	   Compare with a similar piece of code in function.c.  */

	emit_insn (gen_rtx (USE, mode, tem));
#endif

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
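
/* The three untyped-call builtins are designed to be used together,
   as in this hypothetical forwarding function:

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*) ()) target, args, 64);
	__builtin_return (result);

   where 64 is a caller-chosen bound on the argument block size.  The
   block built above is what __builtin_apply, below, unpacks.  */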
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
		   gen_rtx (MEM, BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx (REG, mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx (MEM, FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (call_insn == 0)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	push_to_sequence (call_fusage);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post)
     register tree exp;
     int post;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
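
/* Two sketches of what the code above arranges: for a postincrement
   used for its value,

	v = x++;

   a copy of the old value of X is returned and the addition itself is
   queued or emitted afterward; and a decrement by a constant such as

	--x;

   is first rewritten as an increment by -1, so only add_optab is
   needed for the constant case.  */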
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return;

      /* Do nothing to built-in functions.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
	  || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
	  || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  /* Do nothing if the call returns a variable-sized object.  */
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
	 the adjust will be done for it (since it makes jumps).
	 Do it now, so that if this is inside an argument
	 of a function, we don't get the stack adjustment
	 after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
	  preexpand_calls (TREE_OPERAND (exp, i));
      }
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
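
/* For example, after expanding

	f (1);
	g (2);

   the argument pops for both calls can accumulate in
   PENDING_STACK_ADJUST and be performed here as a single adjust_stack,
   instead of one stack adjustment after each call.  */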
/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
   Returns the cleanups to be performed.  */

static tree
defer_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  tree new_cleanups = NULL_TREE;
  tree cleanups = cleanups_this_call;
  tree last = NULL_TREE;

  while (cleanups_this_call != old_cleanups)
    {
      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
      last = cleanups_this_call;
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }

  if (last)
    {
      /* Remove the list from the chain of cleanups.  */
      TREE_CHAIN (last) = NULL_TREE;

      /* reverse them so that we can build them in the right order.  */
      cleanups = nreverse (cleanups);

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();

      while (cleanups)
	{
	  if (new_cleanups)
	    new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
				  TREE_VALUE (cleanups), new_cleanups);
	  else
	    new_cleanups = TREE_VALUE (cleanups);

	  cleanups = TREE_CHAIN (cleanups);
	}

      pop_obstacks ();
    }

  return new_cleanups;
}
/* Expand all cleanups up to OLD_CLEANUPS.
   Needed here, and also for language-dependent calls.  */

void
expand_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  while (cleanups_this_call != old_cleanups)
    {
      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
      expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
				   TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
			    NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
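
      /* E.g. for a hypothetical test of the sign bit of a byte stored
	 in an int,

		if (x & 0x80) ...

	 I is 7, so MODE becomes QImode and the comparison is done as an
	 8-bit compare when the target provides one.  */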
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      {
	rtx seq1, seq2;
	tree cleanups, old_cleanups;

	if (if_false_label == 0)
	  if_false_label = drop_through_label = gen_label_rtx ();
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
	seq1 = get_insns ();
	end_sequence ();

	old_cleanups = cleanups_this_call;
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	seq2 = get_insns ();
	end_sequence ();

	cleanups = defer_cleanups_to (old_cleanups);
	if (cleanups)
	  {
	    rtx flag = gen_reg_rtx (word_mode);
	    tree new_cleanups;
	    tree cond;

	    /* Flag cleanups as not needed.  */
	    emit_move_insn (flag, const0_rtx);
	    emit_insns (seq1);

	    /* Flag cleanups as needed.  */
	    emit_move_insn (flag, const1_rtx);
	    emit_insns (seq2);

	    /* All cleanups must be on the function_obstack.  */
	    push_obstacks_nochange ();
	    resume_temporary_allocation ();

	    /* convert flag, which is an rtx, into a tree.  */
	    cond = make_node (RTL_EXPR);
	    TREE_TYPE (cond) = integer_type_node;
	    RTL_EXPR_RTL (cond) = flag;
	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	    cond = save_expr (cond);

	    new_cleanups = build (COND_EXPR, void_type_node,
				  truthvalue_conversion (cond),
				  cleanups, integer_zero_node);
	    new_cleanups = fold (new_cleanups);

	    pop_obstacks ();

	    /* Now add in the conditionalized cleanups.  */
	    cleanups_this_call
	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }
	else
	  {
	    emit_insns (seq1);
	    emit_insns (seq2);
	  }
      }
      break;

    case TRUTH_ORIF_EXPR:
      {
	rtx seq1, seq2;
	tree cleanups, old_cleanups;

	if (if_true_label == 0)
	  if_true_label = drop_through_label = gen_label_rtx ();
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
	seq1 = get_insns ();
	end_sequence ();

	old_cleanups = cleanups_this_call;
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	seq2 = get_insns ();
	end_sequence ();

	cleanups = defer_cleanups_to (old_cleanups);
	if (cleanups)
	  {
	    rtx flag = gen_reg_rtx (word_mode);
	    tree new_cleanups;
	    tree cond;

	    /* Flag cleanups as not needed.  */
	    emit_move_insn (flag, const0_rtx);
	    emit_insns (seq1);

	    /* Flag cleanups as needed.  */
	    emit_move_insn (flag, const1_rtx);
	    emit_insns (seq2);

	    /* All cleanups must be on the function_obstack.  */
	    push_obstacks_nochange ();
	    resume_temporary_allocation ();

	    /* convert flag, which is an rtx, into a tree.  */
	    cond = make_node (RTL_EXPR);
	    TREE_TYPE (cond) = integer_type_node;
	    RTL_EXPR_RTL (cond) = flag;
	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	    cond = save_expr (cond);

	    new_cleanups = build (COND_EXPR, void_type_node,
				  truthvalue_conversion (cond),
				  cleanups, integer_zero_node);
	    new_cleanups = fold (new_cleanups);

	    pop_obstacks ();

	    /* Now add in the conditionalized cleanups.  */
	    cleanups_this_call
	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }
	else
	  {
	    emit_insns (seq1);
	    emit_insns (seq2);
	  }
      }
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();
	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);
	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	}
      break;

    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  comparison = compare (exp, EQ, EQ);
	break;
      }
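
      /* E.g. for complex operands, the test z1 == z2 is decomposed
	 exactly as if the source had been

		real (z1) == real (z2) && imag (z1) == imag (z2)

	 (illustrative notation), and the rebuilt TRUTH_ANDIF_EXPR is
	 handled by the && case above.  */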
    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  comparison = compare (exp, NE, NE);
	break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      break;
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
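
/* An illustrative C model (not used by the compiler) of the word-wise
   comparison emitted above: unsigned greater-than on N-word operands,
   most significant word compared first.  The function name and the
   array representation here are hypothetical.  */
#if 0
static int
example_words_gt (op0, op1, nwords)
     unsigned long *op0, *op1;	/* words, most significant first */
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (op0[i] > op1[i])
	return 1;		/* the jump to IF_TRUE_LABEL */
      if (op0[i] != op1[i])
	return 0;		/* the jump to IF_FALSE_LABEL */
      /* Words equal: only then do the lower words matter.  */
    }
  return 0;			/* all words equal, so not greater */
}
#endif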
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
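
/* Illustrative sketch, not part of GNU CC: the equality loop above in
   plain C on two-word operands.  Any mismatching word settles the
   answer, so word order is irrelevant here.  Hypothetical names.  */
#if 0
static int
example_eq_by_parts (a, b)
     unsigned long a[2], b[2];
{
  int i;

  for (i = 0; i < 2; i++)
    if (a[i] != b[i])           /* the branch to if_false_label */
      return 0;
  return 1;                     /* no word differed: equal */
}
#endif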
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
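
/* Worked example of the inversion fallback above (illustrative): to
   branch to IF_FALSE_LABEL when `x == y' is false, the code first emits

        beq if_false_label

   and invert_jump turns it into

        bne if_false_label

   If the branch cannot be inverted, it is instead redirected to a
   fresh label:

        beq L_true
        jmp if_false_label
   L_true:
*/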
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
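
/* Illustrative example: given `5 < x', the canonicalization above
   swaps the operands and swap_condition maps LT to GT, yielding
   `x > 5' -- the constant ends up second without changing the meaning
   of the test.  */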
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                                    NULL_RTX, VOIDmode, 0)));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
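
/* Illustrative sketch, not part of GNU CC: the set/jump/set fallback
   above computes `target = (op0 CODE op1)' roughly as this plain C
   would (hypothetical names):

        target = 1;
        if (op0 CODE op1)
          goto label;
        target = 0;
     label:;

   with the two constants interchanged when INVERT is set.  */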
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
                   gen_rtx (MULT, Pmode, index,
                            GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
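
/* Illustrative example: after the lower bound of the case range has
   been subtracted, the single unsigned compare `index > range' above
   catches both out-of-range directions, because an original value
   below the minimum wraps around to a very large unsigned number.
   With a 4-byte CASE_VECTOR_MODE, an in-range index of 3 then
   dispatches through the address fetched from `table_label + 3 * 4'.  */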
/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory().  */
    if (TYPE_MODE (type) == BLKmode
        || TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = sstoreBI;
    else
      abort ();
  else
    if (TYPE_MODE (type) == BLKmode)
      {
        /* Copy structure.  This expands to a block copy instruction, storeBLK.
           In addition to the arguments expected by the other store instructions,
           it also expects a type size (SImode) on top of the stack, which is the
           structure size in size units (usually bytes).  The two first arguments
           are already on the stack; so we just put the size on level 1.  For some
           other languages, the size may be variable, this is why we don't encode
           it as a storeBLK literal, but rather treat it as a full-fledged expression.  */

        bc_expand_expr (TYPE_SIZE (type));
        opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
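
/* Illustrative example of the storeBLK case above: to store a 12-byte
   structure, the interpreter stack (top first) looks like

        SI 12                   <- size, pushed by bc_expand_expr above
        destination address
        source value

   just before the storeBLK instruction executes.  */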
/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  /* Normalize size and alignment  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
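
/* Illustrative example of the rounding above: with local_vars_size 5
   and a 32-bit (4-byte) alignment request, 5 & 3 is nonzero, so the
   size becomes 5 + (4 - (5 & 3)) = 8 and the local starts at the next
   4-byte boundary.  */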
/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;

  return retval;
}
/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}


static char *
bc_xstrdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);

  strcpy (new, s);
  return new;
}
/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}
/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}
/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}
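
/* Illustrative example: for `int a[10]', the reference `a[i]' becomes
   the equivalent of

        *(&a[0] + i * sizeof (int))

   with `i' first widened to the pointer's precision so the multiply
   cannot overflow in a narrower type.  */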
/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
        if (TREE_CODE (tem) == ARRAY_REF
            && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
            && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)

          bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
        else
          break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else
    if (SIval = bitpos / BITS_PER_UNIT)
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}
/* Emit code to push two SI constants */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
      else
        if (DECL_EXTERNAL (exp))
          bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                                 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
        else
          bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
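
/* Illustrative example: on a target where int has mode SImode and a
   32-bit alignment requirement, the returned code is
   `(int) SImode | 32 << 8' -- the mode in the low byte, the alignment
   (in bits) shifted left 8.  */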
/* Generate constructor label */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (duplicate);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, 0,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else

    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
        register tree elt;
        register int i;
        tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
        int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
        int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
        tree elttype = TREE_TYPE (TREE_TYPE (constr));

        /* If the constructor has fewer fields than the structure,
           clear the whole structure first.  */

        if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
          {
            bc_emit_instruction (duplicate);
            bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
            bc_emit_instruction (clearBLK);
          }

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */

        for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            register enum machine_mode mode;
            int bitsize;
            int bitpos;
            int unsignedp;

            mode = TYPE_MODE (elttype);
            bitsize = GET_MODE_BITSIZE (mode);
            unsignedp = TREE_UNSIGNED (elttype);

            bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                      /* * TYPE_SIZE_UNIT (elttype) */ );

            bc_store_field (elt, bitsize, bitpos, mode,
                            TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                            /* The alignment of TARGET is
                               at least what its type requires.  */
                            VOIDmode, 0,
                            TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (constr)));
          }
      }
}
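
/* Illustrative example: for

        struct { int a, b, c; } x = { 1 };

   the constructor list is shorter than the field list, so the code
   above first duplicates the pointer, pushes the structure size and
   emits clearBLK to zero the whole object, then stores 1 into field
   `a' alone.  */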
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}
/* Store SI/SU in bitfield */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}
/* Load SI/SU from bitfield */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
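
/* Illustrative example: bc_adjust_stack (2) falls through the case
   labels above and emits two `drop' instructions, which is cheaper
   than the general case; any NLEVELS above 2 emits a single
   `adjstackSI NLEVELS' instead.  */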