/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
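
/* Illustrative note (an editor's sketch, not part of GNU CC): CEIL
   rounds an integer division up.  convert_move below uses it to count
   the words a mode needs:

	int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

   so a 6-byte mode on a target with 4-byte words gives
   CEIL (6, 4) = (6 + 3) / 4 = 2 words.  */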
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void store_constructor	PROTO((tree, rtx));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int));
rtx bc_expand_increment		PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code	PROTO((tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx,
					 rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern void (*interim_eh_hook)	PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
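
/* Illustrative sketch (an editor's example, not part of GNU CC): a
   post-increment such as `v++' can be expanded by queueing the
   increment and using the QUEUED rtx in place of V, roughly

	queued = enqueue_insn (v, gen_move_insn (v, plus_constant (v, 1)));

   where the gen_move_insn/plus_constant body is only hypothetical;
   real callers build BODY to suit.  Operands that might be QUEUED are
   later passed through protect_from_queue, and emit_queue flushes the
   pending increments.  */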
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
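
/* Illustrative note (an editor's sketch, not part of GNU CC): the
   typical call pattern, as used in convert_move and emit_move_insn
   below, is

	to = protect_from_queue (to, 1);      .. destination, modified
	from = protect_from_queue (from, 0);  .. source, read only

   after which TO and FROM are safe to put into instructions even if
   they contained QUEUED expressions.  */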
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
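
/* Illustrative sketch (an editor's example, not part of GNU CC):
   widening a signed SImode value NARROW into a fresh DImode register
   would go roughly like

	rtx wide = gen_reg_rtx (DImode);
	convert_move (wide, narrow, 0);

   where a zero UNSIGNEDP requests sign extension and a nonzero one
   requests zero extension.  NARROW here is a hypothetical rtx.  */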
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
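
/* Illustrative note (an editor's sketch, not part of GNU CC): on a
   hypothetical target with 4-byte MOVE_MAX and no autoincrement,
   copying 10 aligned bytes by pieces decomposes into two SImode moves
   and one HImode move, with DATA.offset advancing 0, 4, 8.  */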
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
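
/* Illustrative note (an editor's worked example, not part of GNU CC):
   with MOVE_MAX of 4 and full alignment, move_by_pieces_ninsns (10, 4)
   counts 10 / 4 = 2 SImode moves (remainder 2), then 2 / 2 = 1 HImode
   move, then 0 QImode moves, for a total of 3 insns.  */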
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
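
/* Illustrative sketch (an editor's example, not part of GNU CC): a
   structure assignment of a known 32-byte, word-aligned object would
   reach emit_block_move roughly as

	emit_block_move (dst, src, GEN_INT (32), UNITS_PER_WORD);

   where DST and SRC are hypothetical BLKmode MEMs.  Small constant
   sizes go through move_by_pieces; otherwise a movstrM pattern or the
   memcpy/bcopy library call is used.  */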
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     rtx size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
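
/* Illustrative sketch (an editor's example, not part of GNU CC):
   zero-initializing a BLKmode object of 64 bytes would be

	clear_storage (object, GEN_INT (64));

   while a non-BLKmode object is simply assigned const0_rtx.  */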
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
2025 /* Push a block of length SIZE (perhaps variable)
2026 and return an rtx to address the beginning of the block.
2027 Note that it is not possible for the value returned to be a QUEUED.
2028 The value may be virtual_outgoing_args_rtx.
2030 EXTRA is the number of bytes of padding to push in addition to SIZE.
2031 BELOW nonzero means this padding comes at low addresses;
2032 otherwise, the padding comes at high addresses. */
2035 push_block (size
, extra
, below
)
2041 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2042 if (CONSTANT_P (size
))
2043 anti_adjust_stack (plus_constant (size
, extra
));
2044 else if (GET_CODE (size
) == REG
&& extra
== 0)
2045 anti_adjust_stack (size
);
2048 rtx temp
= copy_to_mode_reg (Pmode
, size
);
2050 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2051 temp
, 0, OPTAB_LIB_WIDEN
);
2052 anti_adjust_stack (temp
);
2055 #ifdef STACK_GROWS_DOWNWARD
2056 temp
= virtual_outgoing_args_rtx
;
2057 if (extra
!= 0 && below
)
2058 temp
= plus_constant (temp
, extra
);
2060 if (GET_CODE (size
) == CONST_INT
)
2061 temp
= plus_constant (virtual_outgoing_args_rtx
,
2062 - INTVAL (size
) - (below
? 0 : extra
));
2063 else if (extra
!= 0 && !below
)
2064 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2065 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2067 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2068 negate_rtx (Pmode
, size
));
2071 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
2077 return gen_rtx (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
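
/* Illustrative sketch (an editor's example, not part of GNU CC):
   pushing a 16-byte block with no padding reserves the space and
   returns its address:

	rtx addr = push_block (GEN_INT (16), 0, 0);

   gen_push_operand, by contrast, yields the (STACK_PUSH_CODE (reg sp))
   style address used by true push instructions, as in
   gen_rtx (MEM, BLKmode, gen_push_operand ()) in emit_push_insn.  */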
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));
      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif
#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
2244 /* Try the most limited insn first, because there's no point
2245 including more than one in the machine description unless
2246 the more limited one has some advantage. */
2247 #ifdef HAVE_movstrqi
2249 && GET_CODE (size
) == CONST_INT
2250 && ((unsigned) INTVAL (size
)
2251 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
2253 rtx pat
= gen_movstrqi (gen_rtx (MEM
, BLKmode
, temp
),
2254 xinner
, size
, GEN_INT (align
));
2262 #ifdef HAVE_movstrhi
2264 && GET_CODE (size
) == CONST_INT
2265 && ((unsigned) INTVAL (size
)
2266 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
2268 rtx pat
= gen_movstrhi (gen_rtx (MEM
, BLKmode
, temp
),
2269 xinner
, size
, GEN_INT (align
));
2277 #ifdef HAVE_movstrsi
2280 rtx pat
= gen_movstrsi (gen_rtx (MEM
, BLKmode
, temp
),
2281 xinner
, size
, GEN_INT (align
));
2289 #ifdef HAVE_movstrdi
2292 rtx pat
= gen_movstrdi (gen_rtx (MEM
, BLKmode
, temp
),
2293 xinner
, size
, GEN_INT (align
));
#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */

	  NO_DEFER_POP;

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  if (output_bytecode)
    {
      tree dest_innermost;

      bc_expand_expr (from);
      bc_emit_instruction (duplicate);

      dest_innermost = bc_expand_address (to);

      /* Can't deduce from TYPE that we're dealing with a bitfield, so
	 take care of it here.  */

      bc_store_memory (TREE_TYPE (to), dest_innermost);
      return NULL;
    }
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
	      || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				 &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					    force_reg (ptr_mode, offset_rtx)));
	  /* If we have a variable offset, the known alignment
	     is only that of the innermost structure containing the field.
	     (Actually, we could sometimes do better by using the
	     align of an element of the innermost array, but no need.)  */
	  if (TREE_CODE (to) == COMPONENT_REF
	      || TREE_CODE (to) == BIT_FIELD_REF)
	    alignment
	      = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

      if (GET_MODE (to_rtx) == BLKmode)
	{
	  int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
	  emit_block_move (to_rtx, value, expr_size (from), align);
	}
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
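
/* Editorial illustration (hypothetical trees, not from the original
   source): a front end expanding the C statement `a = b;' in a
   value-discarding context would call

	expand_assignment (a_tree, b_tree, 0, 0);

   whereas for `x = (a = b);' it would pass WANT_VALUE == 1 and use the
   returned rtx as the value of the inner assignment.  */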
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be true for them too?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the mode.  */
      if (! want_value)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && temp != target
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp),
						    0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
				     Pmode, const0_rtx, Pmode, size, ptr_mode);
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, Pmode, size, ptr_mode);
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
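
/* Editorial illustration: a caller that needs only the side effect of
   the store writes, e.g.,

	store_expr (exp, target, 0);

   and must ignore the NULL_RTX result; passing WANT_VALUE == 1 instead
   yields an rtx that is safe to use even if TARGET is later changed.  */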
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.  */

static void
store_constructor (exp, target)
     tree exp;
     rtx target;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */

  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp);
      emit_move_insn (target, temp);
      return;
    }
  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	emit_move_insn (target, const0_rtx);

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */
      else if (list_length (CONSTRUCTOR_ELTS (exp))
	       != list_length (TYPE_FIELDS (type)))
	clear_storage (target, expr_size (exp));
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, exp);

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					   force_reg (ptr_mode, offset_rtx)));
	    }
	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

	  store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
		       /* The alignment of TARGET is
			  at least what its type requires.  */
		       VOIDmode, 0,
		       TYPE_ALIGN (type) / BITS_PER_UNIT,
		       int_size_in_bytes (type));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */

      if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
	  || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	clear_storage (target, expr_size (exp));
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
	      || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr, xtarget;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (TREE_VALUE (elt), xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

	      store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
			   /* The alignment of TARGET is
			      at least what its type requires.  */
			   VOIDmode, 0,
			   TYPE_ALIGN (type) / BITS_PER_UNIT,
			   int_size_in_bytes (type));
	    }
	}
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt;
      rtx xtarget = XEXP (target, 0);
      int set_word_size = TYPE_ALIGN (type);
      int nbytes = int_size_in_bytes (type);
      tree non_const_elements;
      int need_to_clear_first;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset)
	 and then set the bits we want.  */

      /* Check for all zeros.  */
      if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
	{
	  clear_storage (target, expr_size (exp));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      /* Check for range all ones, or at most a single range.
	 (This optimization is only a win for big sets.)  */
      if (GET_MODE (target) == BLKmode && nbytes > 16
	  && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
	{
	  need_to_clear_first = 1;
	  non_const_elements = CONSTRUCTOR_ELTS (exp);
	}
      else
	{
	  int nbits = nbytes * BITS_PER_UNIT;
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;  /* In bytes from beginning of set.  */
	  non_const_elements = get_set_constructor_bits (exp,
							 bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  rtx datum = GEN_INT (word);
		  rtx to_rtx;
		  /* The assumption here is that it is safe to use XEXP if
		     the set is multi-word, but not if it's single-word.  */
		  if (GET_CODE (target) == MEM)
		    to_rtx = change_address (target, mode,
					     plus_constant (XEXP (target, 0),
							    offset));
		  else if (offset == 0)
		    to_rtx = target;
		  else
		    abort ();
		  emit_move_insn (to_rtx, datum);
		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	  need_to_clear_first = 0;
	}

      for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* start of range of element or NULL */
	  tree startbit = TREE_PURPOSE (elt);
	  /* end of range of element, or element value */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* handle non-range tuple element like [ expr ]  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
	    {
	      if (need_to_clear_first
		  && endb - startb != nbytes * BITS_PER_UNIT)
		clear_storage (target, expr_size (exp));
	      need_to_clear_first = 0;
	      emit_library_call (memset_libfunc, 0,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0), startb),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    {
	      if (need_to_clear_first)
		{
		  clear_storage (target, expr_size (exp));
		  need_to_clear_first = 0;
		}
	      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
				 bitlength_rtx, TYPE_MODE (sizetype),
				 startbit_rtx, TYPE_MODE (sizetype),
				 endbit_rtx, TYPE_MODE (sizetype));
	    }

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }
  else
    abort ();
}
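
/* Editorial illustration (hypothetical names, not from the original
   source): for a C initializer such as

	struct { int x, y; } s = { 1, 2 };

   the front end builds a CONSTRUCTOR tree and this function is reached
   roughly as

	store_constructor (ctor, DECL_RTL (s_decl));

   each element then being stored by store_field at the bit position
   given by its FIELD_DECL.  */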
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_IN_STRUCT_P (object) = 1;
      MEM_IN_STRUCT_P (blk_object) = 1;
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
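
/* Editorial illustration (assumed numbers): storing into a 5-bit
   bit-field that begins 3 bits into a word-aligned structure arrives
   here as something like

	store_field (to_rtx, 5, 3, VOIDmode, exp, word_mode, 1,
		     UNITS_PER_WORD, int_size_in_bytes (type));

   MODE == VOIDmode is what marks the store as a bit-field store.  */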
/* Return true if any object containing the innermost array is an unaligned
   packed structure field.  */

static int
get_inner_unaligned_p (exp)
     tree exp;
{
  int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
	      < needed_alignment)
	    return 1;
	}
      else if (TREE_CODE (exp) != ARRAY_REF
	       && TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      exp = TREE_OPERAND (exp, 0);
    }

  return 0;
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);

	  if (var)
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (EXACT_DIV_EXPR, var,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  index = fold (build (MULT_EXPR, index_type, index,
			       TYPE_SIZE (TREE_TYPE (exp))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize, MODE_INT, 0);
      if (mode == BLKmode)
	mode = VOIDmode;
    }
  *pmode = mode;

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *poffset = offset;
  return exp;
}
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
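
/* Editorial illustration (assumed rtl): given a value such as
   (plus (reg 100) (const_int 4)), force_operand emits the addition and
   hands back an operand usable in any insn:

	rtx sum = force_operand (gen_rtx (PLUS, SImode, some_reg,
					  GEN_INT (4)),
				 NULL_RTX);

   where SOME_REG is a hypothetical pseudo register.  */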
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_stack_temp (TYPE_MODE (part_type),
					int_size_in_bytes (part_type), 0);
	MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size.  So we
	 assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode.  */
      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0)));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0));

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
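
/* Editorial illustration: a typical use is guarding a target before
   expanding into it, e.g.

	if (! safe_from_p (target, exp))
	  target = gen_reg_rtx (mode);

   (a hypothetical fallback) so that EXP cannot observe a
   partially-updated TARGET.  */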
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
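
/* Editorial illustration: the two most common calls in this file are

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   to compute a value in its natural mode, and

	expand_expr (exp, const0_rtx, VOIDmode, 0);

   to expand an expression purely for its side effects.  */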
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  /* Maybe defer this until sure not doing bytecode?  */
  int ignore = (target == const0_rtx
		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
		     || code == COND_EXPR)
		    && TREE_CODE (type) == VOID_TYPE));
  tree context;

  if (output_bytecode && modifier != EXPAND_INITIALIZER)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, modifier);

      target = 0;
    }
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
					label_rtx (exp), p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
				   label_rtx (exp), forced_labels);
	temp = gen_rtx (MEM, FUNCTION_MODE,
			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
	if (function != current_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}
4160 /* ... fall through ... */
4163 /* If a static var's type was incomplete when the decl was written,
4164 but the type is complete now, lay out the decl now. */
4165 if (DECL_SIZE (exp
) == 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4166 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
4168 push_obstacks_nochange ();
4169 end_temporary_allocation ();
4170 layout_decl (exp
, 0);
4171 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
4175 /* ... fall through ... */
4179 if (DECL_RTL (exp
) == 0)
4182 /* Ensure variable marked as used even if it doesn't go through
4183 a parser. If it hasn't be used yet, write out an external
4185 if (! TREE_USED (exp
))
4187 assemble_external (exp
);
4188 TREE_USED (exp
) = 1;
4191 /* Handle variables inherited from containing functions. */
4192 context
= decl_function_context (exp
);
4194 /* We treat inline_function_decl as an alias for the current function
4195 because that is the inline function whose vars, types, etc.
4196 are being merged into the current function.
4197 See expand_inline_function. */
4199 if (context
!= 0 && context
!= current_function_decl
4200 && context
!= inline_function_decl
4201 /* If var is static, we don't need a static chain to access it. */
4202 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
4203 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
4207 /* Mark as non-local and addressable. */
4208 DECL_NONLOCAL (exp
) = 1;
4209 mark_addressable (exp
);
4210 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
4212 addr
= XEXP (DECL_RTL (exp
), 0);
4213 if (GET_CODE (addr
) == MEM
)
4214 addr
= gen_rtx (MEM
, Pmode
,
4215 fix_lexical_addr (XEXP (addr
, 0), exp
));
4217 addr
= fix_lexical_addr (addr
, exp
);
4218 return change_address (DECL_RTL (exp
), mode
, addr
);
4221 /* This is the case of an array whose size is to be determined
4222 from its initializer, while the initializer is still being parsed.
4225 if (GET_CODE (DECL_RTL (exp
)) == MEM
4226 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
4227 return change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
4228 XEXP (DECL_RTL (exp
), 0));
4230 /* If DECL_RTL is memory, we are in the normal case and either
4231 the address is not valid or it is not a register and -fforce-addr
4232 is specified, get the address into a register. */
4234 if (GET_CODE (DECL_RTL (exp
)) == MEM
4235 && modifier
!= EXPAND_CONST_ADDRESS
4236 && modifier
!= EXPAND_SUM
4237 && modifier
!= EXPAND_INITIALIZER
4238 && (! memory_address_p (DECL_MODE (exp
), XEXP (DECL_RTL (exp
), 0))
4240 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
4241 return change_address (DECL_RTL (exp
), VOIDmode
,
4242 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
4244 /* If the mode of DECL_RTL does not match that of the decl, it
4245 must be a promoted value. We return a SUBREG of the wanted mode,
4246 but mark it so that we know that it was already extended. */
4248 if (GET_CODE (DECL_RTL (exp
)) == REG
4249 && GET_MODE (DECL_RTL (exp
)) != mode
)
4251 /* Get the signedness used for this variable. Ensure we get the
4252 same mode we got when the variable was declared. */
4253 if (GET_MODE (DECL_RTL (exp
))
4254 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
4257 temp
= gen_rtx (SUBREG
, mode
, DECL_RTL (exp
), 0);
4258 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4259 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4263 return DECL_RTL (exp
);
4266 return immed_double_const (TREE_INT_CST_LOW (exp
),
4267 TREE_INT_CST_HIGH (exp
),
4271 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
4274 /* If optimized, generate immediate CONST_DOUBLE
4275 which will be turned into memory by reload if necessary.
4277 We used to force a register so that loop.c could see it. But
4278 this does not allow gen_* patterns to perform optimizations with
4279 the constants. It also produces two insns in cases like "x = 1.0;".
4280 On most machines, floating-point constants are not permitted in
4281 many insns, so we'd end up copying it to a register in any case.
4283 Now, we do the copying in expand_binop, if appropriate. */
4284 return immed_real_const (exp
);
4288 if (! TREE_CST_RTL (exp
))
4289 output_constant_def (exp
);
4291 /* TREE_CST_RTL probably contains a constant address.
4292 On RISC machines where a constant address isn't valid,
4293 make some insns to get that address into a register. */
4294 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
4295 && modifier
!= EXPAND_CONST_ADDRESS
4296 && modifier
!= EXPAND_INITIALIZER
4297 && modifier
!= EXPAND_SUM
4298 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
4300 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
4301 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
4302 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
4303 return TREE_CST_RTL (exp
);
4306 context
= decl_function_context (exp
);
4308 /* We treat inline_function_decl as an alias for the current function
4309 because that is the inline function whose vars, types, etc.
4310 are being merged into the current function.
4311 See expand_inline_function. */
4312 if (context
== current_function_decl
|| context
== inline_function_decl
)
4315 /* If this is non-local, handle it. */
4318 temp
= SAVE_EXPR_RTL (exp
);
4319 if (temp
&& GET_CODE (temp
) == REG
)
4321 put_var_into_stack (exp
);
4322 temp
= SAVE_EXPR_RTL (exp
);
4324 if (temp
== 0 || GET_CODE (temp
) != MEM
)
4326 return change_address (temp
, mode
,
4327 fix_lexical_addr (XEXP (temp
, 0), exp
));
4329 if (SAVE_EXPR_RTL (exp
) == 0)
4331 if (mode
== BLKmode
)
4334 = assign_stack_temp (mode
, int_size_in_bytes (type
), 0);
4335 MEM_IN_STRUCT_P (temp
) = AGGREGATE_TYPE_P (type
);
4337 else if (mode
== VOIDmode
)
4340 temp
= gen_reg_rtx (promote_mode (type
, mode
, &unsignedp
, 0));
4342 SAVE_EXPR_RTL (exp
) = temp
;
4343 if (!optimize
&& GET_CODE (temp
) == REG
)
4344 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, temp
,
4347 /* If the mode of TEMP does not match that of the expression, it
4348 must be a promoted value. We pass store_expr a SUBREG of the
4349 wanted mode but mark it so that we know that it was already
4350 extended. Note that `unsignedp' was modified above in
4353 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
4355 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
4356 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4357 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4360 if (temp
== const0_rtx
)
4361 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
4363 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
4366 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4367 must be a promoted value. We return a SUBREG of the wanted mode,
4368 but mark it so that we know that it was already extended. */
4370 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
4371 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
4373 /* Compute the signedness and make the proper SUBREG. */
4374 promote_mode (type
, mode
, &unsignedp
, 0);
4375 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
4376 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4377 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4381 return SAVE_EXPR_RTL (exp
);
4383 case PLACEHOLDER_EXPR
:
4384 /* If there is an object on the head of the placeholder list,
4385 see if some object in it's references is of type TYPE. For
4386 further information, see tree.def. */
4387 if (placeholder_list
)
4390 tree old_list
= placeholder_list
;
4392 for (object
= TREE_PURPOSE (placeholder_list
);
4393 (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
4394 != TYPE_MAIN_VARIANT (type
))
4395 && (TREE_CODE_CLASS (TREE_CODE (object
)) == 'r'
4396 || TREE_CODE_CLASS (TREE_CODE (object
)) == '1'
4397 || TREE_CODE_CLASS (TREE_CODE (object
)) == '2'
4398 || TREE_CODE_CLASS (TREE_CODE (object
)) == 'e');
4399 object
= TREE_OPERAND (object
, 0))
4403 && (TYPE_MAIN_VARIANT (TREE_TYPE (object
))
4404 == TYPE_MAIN_VARIANT (type
)))
4406 /* Expand this object skipping the list entries before
4407 it was found in case it is also a PLACEHOLDER_EXPR.
4408 In that case, we want to translate it using subsequent
4410 placeholder_list
= TREE_CHAIN (placeholder_list
);
4411 temp
= expand_expr (object
, original_target
, tmode
, modifier
);
4412 placeholder_list
= old_list
;
4417 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4420 case WITH_RECORD_EXPR
:
4421 /* Put the object on the placeholder list, expand our first operand,
4422 and pop the list. */
4423 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
4425 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
4427 placeholder_list
= TREE_CHAIN (placeholder_list
);
4431 expand_exit_loop_if_false (NULL_PTR
,
4432 invert_truthvalue (TREE_OPERAND (exp
, 0)));
4437 expand_start_loop (1);
4438 expand_expr_stmt (TREE_OPERAND (exp
, 0));
4446 tree vars
= TREE_OPERAND (exp
, 0);
4447 int vars_need_expansion
= 0;
4449 /* Need to open a binding contour here because
4450 if there are any cleanups they most be contained here. */
4451 expand_start_bindings (0);
4453 /* Mark the corresponding BLOCK for output in its proper place. */
4454 if (TREE_OPERAND (exp
, 2) != 0
4455 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
4456 insert_block (TREE_OPERAND (exp
, 2));
4458 /* If VARS have not yet been expanded, expand them now. */
4461 if (DECL_RTL (vars
) == 0)
4463 vars_need_expansion
= 1;
4466 expand_decl_init (vars
);
4467 vars
= TREE_CHAIN (vars
);
4470 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
4472 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
4478 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
4480 emit_insns (RTL_EXPR_SEQUENCE (exp
));
4481 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
4482 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
4483 free_temps_for_rtl_expr (exp
);
4484 return RTL_EXPR_RTL (exp
);
4487 /* If we don't need the result, just ensure we evaluate any
4492 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4493 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
4497 /* All elts simple constants => refer to a constant in memory. But
4498 if this is a non-BLKmode mode, let it store a field at a time
4499 since that should make a CONST_INT or CONST_DOUBLE when we
4500 fold. Likewise, if we have a target we can use, it is best to
4501 store directly into the target unless the type is large enough
4502 that memcpy will be used. If we are making an initializer and
4503 all operands are constant, put it in memory as well. */
4504 else if ((TREE_STATIC (exp
)
4505 && ((mode
== BLKmode
4506 && ! (target
!= 0 && safe_from_p (target
, exp
)))
4507 || TREE_ADDRESSABLE (exp
)
4508 || (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
4509 && (move_by_pieces_ninsns
4510 (TREE_INT_CST_LOW (TYPE_SIZE (type
))/BITS_PER_UNIT
,
4511 TYPE_ALIGN (type
) / BITS_PER_UNIT
)
4513 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
4515 rtx constructor
= output_constant_def (exp
);
4516 if (modifier
!= EXPAND_CONST_ADDRESS
4517 && modifier
!= EXPAND_INITIALIZER
4518 && modifier
!= EXPAND_SUM
4519 && (! memory_address_p (GET_MODE (constructor
),
4520 XEXP (constructor
, 0))
4522 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
4523 constructor
= change_address (constructor
, VOIDmode
,
4524 XEXP (constructor
, 0));
4530 if (target
== 0 || ! safe_from_p (target
, exp
))
4532 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
4533 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
4537 = assign_stack_temp (mode
, int_size_in_bytes (type
), 0);
4538 if (AGGREGATE_TYPE_P (type
))
4539 MEM_IN_STRUCT_P (target
) = 1;
4543 if (TREE_READONLY (exp
))
4545 target
= copy_rtx (target
);
4546 RTX_UNCHANGING_P (target
) = 1;
4549 store_constructor (exp
, target
);
4555 tree exp1
= TREE_OPERAND (exp
, 0);
4558 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4559 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4560 This code has the same general effect as simply doing
4561 expand_expr on the save expr, except that the expression PTR
4562 is computed for use as a memory address. This means different
4563 code, suitable for indexing, may be generated. */
4564 if (TREE_CODE (exp1
) == SAVE_EXPR
4565 && SAVE_EXPR_RTL (exp1
) == 0
4566 && TYPE_MODE (TREE_TYPE (exp1
)) == ptr_mode
)
4568 temp
= expand_expr (TREE_OPERAND (exp1
, 0), NULL_RTX
,
4569 VOIDmode
, EXPAND_SUM
);
4570 op0
= memory_address (mode
, temp
);
4571 op0
= copy_all_regs (op0
);
4572 SAVE_EXPR_RTL (exp1
) = op0
;
4576 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4577 op0
= memory_address (mode
, op0
);
4580 temp
= gen_rtx (MEM
, mode
, op0
);
4581 /* If address was computed by addition,
4582 mark this as an element of an aggregate. */
4583 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
4584 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
4585 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == PLUS_EXPR
)
4586 || AGGREGATE_TYPE_P (TREE_TYPE (exp
))
4587 || (TREE_CODE (exp1
) == ADDR_EXPR
4588 && (exp2
= TREE_OPERAND (exp1
, 0))
4589 && AGGREGATE_TYPE_P (TREE_TYPE (exp2
))))
4590 MEM_IN_STRUCT_P (temp
) = 1;
4591 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) | flag_volatile
;
4593 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
4594 here, because, in C and C++, the fact that a location is accessed
4595 through a pointer to const does not mean that the value there can
4596 never change. Languages where it can never change should
4597 also set TREE_STATIC. */
4598 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
4603 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
4607 tree array
= TREE_OPERAND (exp
, 0);
4608 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
4609 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4610 tree index
= TREE_OPERAND (exp
, 1);
4611 tree index_type
= TREE_TYPE (index
);
4614 if (TREE_CODE (low_bound
) != INTEGER_CST
4615 && contains_placeholder_p (low_bound
))
4616 low_bound
= build (WITH_RECORD_EXPR
, sizetype
, low_bound
, exp
);
4618 /* Optimize the special-case of a zero lower bound.
4620 We convert the low_bound to sizetype to avoid some problems
4621 with constant folding. (E.g. suppose the lower bound is 1,
4622 and its mode is QI. Without the conversion, (ARRAY
4623 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4624 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4626 But sizetype isn't quite right either (especially if
4627 the lowbound is negative). FIXME */
4629 if (! integer_zerop (low_bound
))
4630 index
= fold (build (MINUS_EXPR
, index_type
, index
,
4631 convert (sizetype
, low_bound
)));
4633 if ((TREE_CODE (index
) != INTEGER_CST
4634 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4635 && (! SLOW_UNALIGNED_ACCESS
|| ! get_inner_unaligned_p (exp
)))
4637 /* Nonconstant array index or nonconstant element size, and
4638 not an array in an unaligned (packed) structure field.
4639 Generate the tree for *(&array+index) and expand that,
4640 except do it in a language-independent way
4641 and don't complain about non-lvalue arrays.
4642 `mark_addressable' should already have been called
4643 for any array for which this case will be reached. */
4645 /* Don't forget the const or volatile flag from the array
4647 tree variant_type
= build_type_variant (type
,
4648 TREE_READONLY (exp
),
4649 TREE_THIS_VOLATILE (exp
));
4650 tree array_adr
= build1 (ADDR_EXPR
,
4651 build_pointer_type (variant_type
), array
);
4653 tree size
= size_in_bytes (type
);
4655 /* Convert the integer argument to a type the same size as sizetype
4656 so the multiply won't overflow spuriously. */
4657 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4658 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4661 if (TREE_CODE (size
) != INTEGER_CST
4662 && contains_placeholder_p (size
))
4663 size
= build (WITH_RECORD_EXPR
, sizetype
, size
, exp
);
4665 /* Don't think the address has side effects
4666 just because the array does.
4667 (In some cases the address might have side effects,
4668 and we fail to record that fact here. However, it should not
4669 matter, since expand_expr should not care.) */
4670 TREE_SIDE_EFFECTS (array_adr
) = 0;
4674 (INDIRECT_REF
, type
,
4675 fold (build (PLUS_EXPR
,
4676 TYPE_POINTER_TO (variant_type
),
4681 TYPE_POINTER_TO (variant_type
),
4682 fold (build (MULT_EXPR
, TREE_TYPE (index
),
4684 convert (TREE_TYPE (index
),
4687 /* Volatility, etc., of new expression is same as old
4689 TREE_SIDE_EFFECTS (elt
) = TREE_SIDE_EFFECTS (exp
);
4690 TREE_THIS_VOLATILE (elt
) = TREE_THIS_VOLATILE (exp
);
4691 TREE_READONLY (elt
) = TREE_READONLY (exp
);
4693 return expand_expr (elt
, target
, tmode
, modifier
);
4696 /* Fold an expression like: "foo"[2].
4697 This is not done in fold so it won't happen inside &.
4698 Don't fold if this is for wide characters since it's too
4699 difficult to do correctly and this is a very rare case. */
4701 if (TREE_CODE (array
) == STRING_CST
4702 && TREE_CODE (index
) == INTEGER_CST
4703 && !TREE_INT_CST_HIGH (index
)
4704 && (i
= TREE_INT_CST_LOW (index
)) < TREE_STRING_LENGTH (array
)
4705 && GET_MODE_CLASS (mode
) == MODE_INT
4706 && GET_MODE_SIZE (mode
) == 1)
4707 return GEN_INT (TREE_STRING_POINTER (array
)[i
]);
4709 /* If this is a constant index into a constant array,
4710 just get the value from the array. Handle both the cases when
4711 we have an explicit constructor and when our operand is a variable
4712 that was declared const. */
4714 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
4716 if (TREE_CODE (index
) == INTEGER_CST
4717 && TREE_INT_CST_HIGH (index
) == 0)
4719 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
4721 i
= TREE_INT_CST_LOW (index
);
4723 elem
= TREE_CHAIN (elem
);
4725 return expand_expr (fold (TREE_VALUE (elem
)), target
,
4730 else if (optimize
>= 1
4731 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
4732 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
4733 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
4735 if (TREE_CODE (index
) == INTEGER_CST
4736 && TREE_INT_CST_HIGH (index
) == 0)
4738 tree init
= DECL_INITIAL (array
);
4740 i
= TREE_INT_CST_LOW (index
);
4741 if (TREE_CODE (init
) == CONSTRUCTOR
)
4743 tree elem
= CONSTRUCTOR_ELTS (init
);
4746 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
))
4747 elem
= TREE_CHAIN (elem
);
4749 return expand_expr (fold (TREE_VALUE (elem
)), target
,
4752 else if (TREE_CODE (init
) == STRING_CST
4753 && i
< TREE_STRING_LENGTH (init
))
4754 return GEN_INT (TREE_STRING_POINTER (init
)[i
]);
4759 /* Treat array-ref with constant index as a component-ref. */
4763 /* If the operand is a CONSTRUCTOR, we can just extract the
4764 appropriate field if it is present. Don't do this if we have
4765 already written the data since we want to refer to that copy
4766 and varasm.c assumes that's what we'll do. */
4767 if (code
!= ARRAY_REF
4768 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
4769 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
4773 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
4774 elt
= TREE_CHAIN (elt
))
4775 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
4776 return expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
4780 enum machine_mode mode1
;
4785 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
4786 &mode1
, &unsignedp
, &volatilep
);
4789 /* If we got back the original object, something is wrong. Perhaps
4790 we are evaluating an expression too early. In any event, don't
4791 infinitely recurse. */
4795 /* In some cases, we will be offsetting OP0's address by a constant.
4796 So get it as a sum, if possible. If we will be using it
4797 directly in an insn, we validate it.
4799 If TEM's type is a union of variable size, pass TARGET to the inner
4800 computation, since it will need a temporary and TARGET is known
4801 to have to do. This occurs in unchecked conversion in Ada. */
4803 op0
= expand_expr (tem
,
4804 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
4805 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
4807 ? target
: NULL_RTX
),
4808 VOIDmode
, EXPAND_SUM
);
4810 /* If this is a constant, put it into a register if it is a
4811 legitimate constant and memory if it isn't. */
4812 if (CONSTANT_P (op0
))
4814 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
4815 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
))
4816 op0
= force_reg (mode
, op0
);
4818 op0
= validize_mem (force_const_mem (mode
, op0
));
4821 alignment
= TYPE_ALIGN (TREE_TYPE (tem
)) / BITS_PER_UNIT
;
4824 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4826 if (GET_CODE (op0
) != MEM
)
4828 op0
= change_address (op0
, VOIDmode
,
4829 gen_rtx (PLUS
, ptr_mode
, XEXP (op0
, 0),
4830 force_reg (ptr_mode
, offset_rtx
)));
4831 /* If we have a variable offset, the known alignment
4832 is only that of the innermost structure containing the field.
4833 (Actually, we could sometimes do better by using the
4834 size of an element of the innermost array, but no need.) */
4835 if (TREE_CODE (exp
) == COMPONENT_REF
4836 || TREE_CODE (exp
) == BIT_FIELD_REF
)
4837 alignment
= (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4841 /* Don't forget about volatility even if this is a bitfield. */
4842 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
4844 op0
= copy_rtx (op0
);
4845 MEM_VOLATILE_P (op0
) = 1;
4848 /* In cases where an aligned union has an unaligned object
4849 as a field, we might be extracting a BLKmode value from
4850 an integer-mode (e.g., SImode) object. Handle this case
4851 by doing the extract into an object as wide as the field
4852 (which we know to be the width of a basic mode), then
4853 storing into memory, and changing the mode to BLKmode. */
4854 if (mode1
== VOIDmode
4855 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
4856 || (modifier
!= EXPAND_CONST_ADDRESS
4857 && modifier
!= EXPAND_SUM
4858 && modifier
!= EXPAND_INITIALIZER
4859 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
])
4860 /* If the field isn't aligned enough to fetch as a memref,
4861 fetch it as a bit field. */
4862 || (SLOW_UNALIGNED_ACCESS
4863 && ((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
))
4864 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))))))
4866 enum machine_mode ext_mode
= mode
;
4868 if (ext_mode
== BLKmode
)
4869 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
4871 if (ext_mode
== BLKmode
)
4874 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
4875 unsignedp
, target
, ext_mode
, ext_mode
,
4877 int_size_in_bytes (TREE_TYPE (tem
)));
4878 if (mode
== BLKmode
)
4880 rtx
new = assign_stack_temp (ext_mode
,
4881 bitsize
/ BITS_PER_UNIT
, 0);
4883 emit_move_insn (new, op0
);
4884 op0
= copy_rtx (new);
4885 PUT_MODE (op0
, BLKmode
);
4886 MEM_IN_STRUCT_P (op0
) = 1;
4892 /* If the result is BLKmode, use that to access the object
4894 if (mode
== BLKmode
)
4897 /* Get a reference to just this component. */
4898 if (modifier
== EXPAND_CONST_ADDRESS
4899 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
4900 op0
= gen_rtx (MEM
, mode1
, plus_constant (XEXP (op0
, 0),
4901 (bitpos
/ BITS_PER_UNIT
)));
4903 op0
= change_address (op0
, mode1
,
4904 plus_constant (XEXP (op0
, 0),
4905 (bitpos
/ BITS_PER_UNIT
)));
4906 MEM_IN_STRUCT_P (op0
) = 1;
4907 MEM_VOLATILE_P (op0
) |= volatilep
;
4908 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
)
4911 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
4912 convert_move (target
, op0
, unsignedp
);
4918 tree base
= build1 (ADDR_EXPR
, type
, TREE_OPERAND (exp
, 0));
4919 tree addr
= build (PLUS_EXPR
, type
, base
, TREE_OPERAND (exp
, 1));
4920 op0
= expand_expr (addr
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4921 temp
= gen_rtx (MEM
, mode
, memory_address (mode
, op0
));
4922 MEM_IN_STRUCT_P (temp
) = 1;
4923 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
);
4924 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4925 a location is accessed through a pointer to const does not mean
4926 that the value there can never change. */
4927 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
);
4932 /* Intended for a reference to a buffer of a file-object in Pascal.
4933 But it's not certain that a special tree code will really be
4934 necessary for these. INDIRECT_REF might work for them. */
4940 /* Pascal set IN expression.
4943 rlo = set_low - (set_low%bits_per_word);
4944 the_word = set [ (index - rlo)/bits_per_word ];
4945 bit_index = index % bits_per_word;
4946 bitmask = 1 << bit_index;
4947 return !!(the_word & bitmask); */
4949 tree set
= TREE_OPERAND (exp
, 0);
4950 tree index
= TREE_OPERAND (exp
, 1);
4951 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
4952 tree set_type
= TREE_TYPE (set
);
4953 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
4954 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
4955 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
4956 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
4957 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
4958 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
4959 rtx setaddr
= XEXP (setval
, 0);
4960 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
4962 rtx diff
, quo
, rem
, addr
, bit
, result
;
4964 preexpand_calls (exp
);
4966 /* If domain is empty, answer is no. Likewise if index is constant
4967 and out of bounds. */
4968 if ((TREE_CODE (set_high_bound
) == INTEGER_CST
4969 && TREE_CODE (set_low_bound
) == INTEGER_CST
4970 && tree_int_cst_lt (set_high_bound
, set_low_bound
)
4971 || (TREE_CODE (index
) == INTEGER_CST
4972 && TREE_CODE (set_low_bound
) == INTEGER_CST
4973 && tree_int_cst_lt (index
, set_low_bound
))
4974 || (TREE_CODE (set_high_bound
) == INTEGER_CST
4975 && TREE_CODE (index
) == INTEGER_CST
4976 && tree_int_cst_lt (set_high_bound
, index
))))
4980 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
4982 /* If we get here, we have to generate the code for both cases
4983 (in range and out of range). */
4985 op0
= gen_label_rtx ();
4986 op1
= gen_label_rtx ();
4988 if (! (GET_CODE (index_val
) == CONST_INT
4989 && GET_CODE (lo_r
) == CONST_INT
))
4991 emit_cmp_insn (index_val
, lo_r
, LT
, NULL_RTX
,
4992 GET_MODE (index_val
), iunsignedp
, 0);
4993 emit_jump_insn (gen_blt (op1
));
4996 if (! (GET_CODE (index_val
) == CONST_INT
4997 && GET_CODE (hi_r
) == CONST_INT
))
4999 emit_cmp_insn (index_val
, hi_r
, GT
, NULL_RTX
,
5000 GET_MODE (index_val
), iunsignedp
, 0);
5001 emit_jump_insn (gen_bgt (op1
));
5004 /* Calculate the element number of bit zero in the first word
5006 if (GET_CODE (lo_r
) == CONST_INT
)
5007 rlow
= GEN_INT (INTVAL (lo_r
)
5008 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
5010 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
5011 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
5012 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5014 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
5015 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5017 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
5018 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5019 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
5020 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5022 addr
= memory_address (byte_mode
,
5023 expand_binop (index_mode
, add_optab
, diff
,
5024 setaddr
, NULL_RTX
, iunsignedp
,
5027 /* Extract the bit we want to examine */
5028 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
5029 gen_rtx (MEM
, byte_mode
, addr
),
5030 make_tree (TREE_TYPE (index
), rem
),
5032 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
5033 GET_MODE (target
) == byte_mode
? target
: 0,
5034 1, OPTAB_LIB_WIDEN
);
5036 if (result
!= target
)
5037 convert_move (target
, result
, 1);
5039 /* Output the code to handle the out-of-range case. */
5042 emit_move_insn (target
, const0_rtx
);
5047 case WITH_CLEANUP_EXPR
:
5048 if (RTL_EXPR_RTL (exp
) == 0)
5051 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5053 = tree_cons (NULL_TREE
, TREE_OPERAND (exp
, 2), cleanups_this_call
);
5054 /* That's it for this cleanup. */
5055 TREE_OPERAND (exp
, 2) = 0;
5056 (*interim_eh_hook
) (NULL_TREE
);
5058 return RTL_EXPR_RTL (exp
);
5060 case CLEANUP_POINT_EXPR
:
5062 extern int temp_slot_level
;
5063 tree old_cleanups
= cleanups_this_call
;
5064 int old_temp_level
= target_temp_slot_level
;
5066 target_temp_slot_level
= temp_slot_level
;
5067 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5068 /* If we're going to use this value, load it up now. */
5070 op0
= force_not_mem (op0
);
5071 expand_cleanups_to (old_cleanups
);
5072 preserve_temp_slots (op0
);
5075 target_temp_slot_level
= old_temp_level
;
5080 /* Check for a built-in function. */
5081 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5082 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5084 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5085 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
5087 /* If this call was expanded already by preexpand_calls,
5088 just return the result we got. */
5089 if (CALL_EXPR_RTL (exp
) != 0)
5090 return CALL_EXPR_RTL (exp
);
5092 return expand_call (exp
, target
, ignore
);
5094 case NON_LVALUE_EXPR
:
5097 case REFERENCE_EXPR
:
5098 if (TREE_CODE (type
) == UNION_TYPE
)
5100 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
5103 if (mode
== BLKmode
)
5105 if (TYPE_SIZE (type
) == 0
5106 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5108 target
= assign_stack_temp (BLKmode
,
5109 (TREE_INT_CST_LOW (TYPE_SIZE (type
))
5110 + BITS_PER_UNIT
- 1)
5111 / BITS_PER_UNIT
, 0);
5112 MEM_IN_STRUCT_P (target
) = AGGREGATE_TYPE_P (type
);
5115 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5118 if (GET_CODE (target
) == MEM
)
5119 /* Store data into beginning of memory target. */
5120 store_expr (TREE_OPERAND (exp
, 0),
5121 change_address (target
, TYPE_MODE (valtype
), 0), 0);
5123 else if (GET_CODE (target
) == REG
)
5124 /* Store this field into a union of the proper type. */
5125 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
5126 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
5128 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5132 /* Return the entire union. */
5136 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5138 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
5141 /* If the signedness of the conversion differs and OP0 is
5142 a promoted SUBREG, clear that indication since we now
5143 have to do the proper extension. */
5144 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
5145 && GET_CODE (op0
) == SUBREG
)
5146 SUBREG_PROMOTED_VAR_P (op0
) = 0;
5151 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
5152 if (GET_MODE (op0
) == mode
)
5155 /* If OP0 is a constant, just convert it into the proper mode. */
5156 if (CONSTANT_P (op0
))
5158 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
5159 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5161 if (modifier
== EXPAND_INITIALIZER
)
5162 return gen_rtx (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
5164 if (flag_force_mem
&& GET_CODE (op0
) == MEM
)
5165 op0
= copy_to_reg (op0
);
5169 convert_to_mode (mode
, op0
,
5170 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5172 convert_move (target
, op0
,
5173 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5177 /* We come here from MINUS_EXPR when the second operand is a constant. */
5179 this_optab
= add_optab
;
5181 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5182 something else, make sure we add the register to the constant and
5183 then to the other thing. This case can occur during strength
5184 reduction and doing it this way will produce better code if the
5185 frame pointer or argument pointer is eliminated.
5187 fold-const.c will ensure that the constant is always in the inner
5188 PLUS_EXPR, so the only case we need to do anything about is if
5189 sp, ap, or fp is our second argument, in which case we must swap
5190 the innermost first argument and our second argument. */
5192 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5193 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
5194 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
5195 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
5196 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
5197 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
5199 tree t
= TREE_OPERAND (exp
, 1);
5201 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5202 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
5205 /* If the result is to be ptr_mode and we are adding an integer to
5206 something, we might be forming a constant. So try to use
5207 plus_constant. If it produces a sum and we can't accept it,
5208 use force_operand. This allows P = &ARR[const] to generate
5209 efficient code on machines where a SYMBOL_REF is not a valid
5212 If this is an EXPAND_SUM call, always return the sum. */
5213 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
5214 || mode
== ptr_mode
)
5216 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
5217 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5218 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
5220 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
5222 op1
= plus_constant (op1
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)));
5223 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5224 op1
= force_operand (op1
, target
);
5228 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
5229 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
5230 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
5232 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
5234 if (! CONSTANT_P (op0
))
5236 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5237 VOIDmode
, modifier
);
5238 /* Don't go to both_summands if modifier
5239 says it's not right to return a PLUS. */
5240 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5244 op0
= plus_constant (op0
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)));
5245 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5246 op0
= force_operand (op0
, target
);
5251 /* No sense saving up arithmetic to be done
5252 if it's all in the wrong mode to form part of an address.
5253 And force_operand won't know whether to sign-extend or
5255 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5256 || mode
!= ptr_mode
)
5259 preexpand_calls (exp
);
5260 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5263 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
5264 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
5267 /* Make sure any term that's a sum with a constant comes last. */
5268 if (GET_CODE (op0
) == PLUS
5269 && CONSTANT_P (XEXP (op0
, 1)))
5275 /* If adding to a sum including a constant,
5276 associate it to put the constant outside. */
5277 if (GET_CODE (op1
) == PLUS
5278 && CONSTANT_P (XEXP (op1
, 1)))
5280 rtx constant_term
= const0_rtx
;
5282 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
5285 /* Ensure that MULT comes first if there is one. */
5286 else if (GET_CODE (op0
) == MULT
)
5287 op0
= gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
5289 op0
= gen_rtx (PLUS
, mode
, XEXP (op1
, 0), op0
);
5291 /* Let's also eliminate constants from op0 if possible. */
5292 op0
= eliminate_constant_term (op0
, &constant_term
);
5294 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5295 their sum should be a constant. Form it into OP1, since the
5296 result we want will then be OP0 + OP1. */
5298 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
5303 op1
= gen_rtx (PLUS
, mode
, constant_term
, XEXP (op1
, 1));
5306 /* Put a constant term last and put a multiplication first. */
5307 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
5308 temp
= op1
, op1
= op0
, op0
= temp
;
5310 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
5311 return temp
? temp
: gen_rtx (PLUS
, mode
, op0
, op1
);
5314 /* For initializers, we are allowed to return a MINUS of two
5315 symbolic constants. Here we handle all cases when both operands
5317 /* Handle difference of two symbolic constants,
5318 for the sake of an initializer. */
5319 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
5320 && really_constant_p (TREE_OPERAND (exp
, 0))
5321 && really_constant_p (TREE_OPERAND (exp
, 1)))
5323 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
5324 VOIDmode
, modifier
);
5325 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5326 VOIDmode
, modifier
);
5328 /* If the last operand is a CONST_INT, use plus_constant of
5329 the negated constant. Else make the MINUS. */
5330 if (GET_CODE (op1
) == CONST_INT
)
5331 return plus_constant (op0
, - INTVAL (op1
));
5333 return gen_rtx (MINUS
, mode
, op0
, op1
);
5335 /* Convert A - const to A + (-const). */
5336 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
5338 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
5339 TREE_OPERAND (exp
, 1)));
5341 /* Deal with the case where we can't negate the constant
5343 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
5345 tree newtype
= signed_type (type
);
5346 tree newop0
= convert (newtype
, TREE_OPERAND (exp
, 0));
5347 tree newop1
= convert (newtype
, TREE_OPERAND (exp
, 1));
5348 tree newneg
= fold (build1 (NEGATE_EXPR
, newtype
, newop1
));
5350 if (! TREE_OVERFLOW (newneg
))
5351 return expand_expr (convert (type
,
5352 build (PLUS_EXPR
, newtype
,
5354 target
, tmode
, modifier
);
5358 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
5362 this_optab
= sub_optab
;
5366 preexpand_calls (exp
);
5367 /* If first operand is constant, swap them.
5368 Thus the following special case checks need only
5369 check the second operand. */
5370 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5372 register tree t1
= TREE_OPERAND (exp
, 0);
5373 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
5374 TREE_OPERAND (exp
, 1) = t1
;
5377 /* Attempt to return something suitable for generating an
5378 indexed address, for machines that support that. */
5380 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
5381 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
5382 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
5384 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, EXPAND_SUM
);
5386 /* Apply distributive law if OP0 is x+c. */
5387 if (GET_CODE (op0
) == PLUS
5388 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
5389 return gen_rtx (PLUS
, mode
,
5390 gen_rtx (MULT
, mode
, XEXP (op0
, 0),
5391 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
5392 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
5393 * INTVAL (XEXP (op0
, 1))));
5395 if (GET_CODE (op0
) != REG
)
5396 op0
= force_operand (op0
, NULL_RTX
);
5397 if (GET_CODE (op0
) != REG
)
5398 op0
= copy_to_mode_reg (mode
, op0
);
5400 return gen_rtx (MULT
, mode
, op0
,
5401 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
5404 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5407 /* Check for multiplying things that have been extended
5408 from a narrower type. If this machine supports multiplying
5409 in that narrower type with a result in the desired type,
5410 do it that way, and avoid the explicit type-conversion. */
5411 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
5412 && TREE_CODE (type
) == INTEGER_TYPE
5413 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5414 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5415 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
5416 && int_fits_type_p (TREE_OPERAND (exp
, 1),
5417 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5418 /* Don't use a widening multiply if a shift will do. */
5419 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
5420 > HOST_BITS_PER_WIDE_INT
)
5421 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
5423 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
5424 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
5426 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
5427 /* If both operands are extended, they must either both
5428 be zero-extended or both be sign-extended. */
5429 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
5431 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
5433 enum machine_mode innermode
5434 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
5435 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5436 ? umul_widen_optab
: smul_widen_optab
);
5437 if (mode
== GET_MODE_WIDER_MODE (innermode
)
5438 && this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
5440 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
5441 NULL_RTX
, VOIDmode
, 0);
5442 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
5443 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5446 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
5447 NULL_RTX
, VOIDmode
, 0);
5451 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5452 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5453 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
5455 case TRUNC_DIV_EXPR
:
5456 case FLOOR_DIV_EXPR
:
5458 case ROUND_DIV_EXPR
:
5459 case EXACT_DIV_EXPR
:
5460 preexpand_calls (exp
);
5461 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5463 /* Possible optimization: compute the dividend with EXPAND_SUM
5464 then if the divisor is constant can optimize the case
5465 where some terms of the dividend have coeffs divisible by it. */
5466 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5467 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5468 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
5471 this_optab
= flodiv_optab
;
5474 case TRUNC_MOD_EXPR
:
5475 case FLOOR_MOD_EXPR
:
5477 case ROUND_MOD_EXPR
:
5478 preexpand_calls (exp
);
5479 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5481 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5482 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5483 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
5485 case FIX_ROUND_EXPR
:
5486 case FIX_FLOOR_EXPR
:
5488 abort (); /* Not used for C. */
5490 case FIX_TRUNC_EXPR
:
5491 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
5493 target
= gen_reg_rtx (mode
);
5494 expand_fix (target
, op0
, unsignedp
);
5498 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
5500 target
= gen_reg_rtx (mode
);
5501 /* expand_float can't figure out what to do if FROM has VOIDmode.
5502 So give it the correct mode. With -O, cse will optimize this. */
5503 if (GET_MODE (op0
) == VOIDmode
)
5504 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
5506 expand_float (target
, op0
,
5507 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5511 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5512 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
5518 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5520 /* Handle complex values specially. */
5521 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
5522 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
5523 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
5525 /* Unsigned abs is simply the operand. Testing here means we don't
5526 risk generating incorrect code below. */
5527 if (TREE_UNSIGNED (type
))
5530 return expand_abs (mode
, op0
, target
, unsignedp
,
5531 safe_from_p (target
, TREE_OPERAND (exp
, 0)));
5535 target
= original_target
;
5536 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1))
5537 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
5538 || GET_MODE (target
) != mode
5539 || (GET_CODE (target
) == REG
5540 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
5541 target
= gen_reg_rtx (mode
);
5542 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
5543 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
5545 /* First try to do it with a special MIN or MAX instruction.
5546 If that does not win, use a conditional jump to select the proper
5548 this_optab
= (TREE_UNSIGNED (type
)
5549 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
5550 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
5552 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
5557 /* At this point, a MEM target is no longer useful; we will get better
5560 if (GET_CODE (target
) == MEM
)
5561 target
= gen_reg_rtx (mode
);
5564 emit_move_insn (target
, op0
);
5566 op0
= gen_label_rtx ();
5568 /* If this mode is an integer too wide to compare properly,
5569 compare word by word. Rely on cse to optimize constant cases. */
5570 if (GET_MODE_CLASS (mode
) == MODE_INT
&& !can_compare_p (mode
))
5572 if (code
== MAX_EXPR
)
5573 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
5574 target
, op1
, NULL_RTX
, op0
);
5576 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
5577 op1
, target
, NULL_RTX
, op0
);
5578 emit_move_insn (target
, op1
);
5582 if (code
== MAX_EXPR
)
5583 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
5584 ? compare_from_rtx (target
, op1
, GEU
, 1, mode
, NULL_RTX
, 0)
5585 : compare_from_rtx (target
, op1
, GE
, 0, mode
, NULL_RTX
, 0));
5587 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
5588 ? compare_from_rtx (target
, op1
, LEU
, 1, mode
, NULL_RTX
, 0)
5589 : compare_from_rtx (target
, op1
, LE
, 0, mode
, NULL_RTX
, 0));
5590 if (temp
== const0_rtx
)
5591 emit_move_insn (target
, op1
);
5592 else if (temp
!= const_true_rtx
)
5594 if (bcc_gen_fctn
[(int) GET_CODE (temp
)] != 0)
5595 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (temp
)]) (op0
));
5598 emit_move_insn (target
, op1
);
5605 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5606 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
5612 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5613 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
5618 /* ??? Can optimize bitwise operations with one arg constant.
5619 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5620 and (a bitwise1 b) bitwise2 b (etc)
5621 but that is probably not worth while. */
5623 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5624 boolean values when we want in all cases to compute both of them. In
5625 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5626 as actual zero-or-1 values and then bitwise anding. In cases where
5627 there cannot be any side effects, better code would be made by
5628 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5629 how to recognize those cases. */
5631 case TRUTH_AND_EXPR
:
5633 this_optab
= and_optab
;
5638 this_optab
= ior_optab
;
5641 case TRUTH_XOR_EXPR
:
5643 this_optab
= xor_optab
;
5650 preexpand_calls (exp
);
5651 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5653 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
5654 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
5657 /* Could determine the answer when only additive constants differ. Also,
5658 the addition of one can be handled by changing the condition. */
5665 preexpand_calls (exp
);
5666 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
5670 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5671 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
5673 && GET_CODE (original_target
) == REG
5674 && (GET_MODE (original_target
)
5675 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
5677 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
5680 if (temp
!= original_target
)
5681 temp
= copy_to_reg (temp
);
5683 op1
= gen_label_rtx ();
5684 emit_cmp_insn (temp
, const0_rtx
, EQ
, NULL_RTX
,
5685 GET_MODE (temp
), unsignedp
, 0);
5686 emit_jump_insn (gen_beq (op1
));
5687 emit_move_insn (temp
, const1_rtx
);
5692 /* If no set-flag instruction, must generate a conditional
5693 store into a temporary variable. Drop through
5694 and handle this like && and ||. */
5696 case TRUTH_ANDIF_EXPR
:
5697 case TRUTH_ORIF_EXPR
:
5699 && (target
== 0 || ! safe_from_p (target
, exp
)
5700 /* Make sure we don't have a hard reg (such as function's return
5701 value) live across basic blocks, if not optimizing. */
5702 || (!optimize
&& GET_CODE (target
) == REG
5703 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
5704 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5707 emit_clr_insn (target
);
5709 op1
= gen_label_rtx ();
5710 jumpifnot (exp
, op1
);
5713 emit_0_to_1_insn (target
);
5716 return ignore
? const0_rtx
: target
;
5718 case TRUTH_NOT_EXPR
:
5719 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
5720 /* The parser is careful to generate TRUTH_NOT_EXPR
5721 only with operands that are always zero or one. */
5722 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
5723 target
, 1, OPTAB_LIB_WIDEN
);
5729 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
5731 return expand_expr (TREE_OPERAND (exp
, 1),
5732 (ignore
? const0_rtx
: target
),
5737 rtx flag
= NULL_RTX
;
5738 tree left_cleanups
= NULL_TREE
;
5739 tree right_cleanups
= NULL_TREE
;
5741 /* Used to save a pointer to the place to put the setting of
5742 the flag that indicates if this side of the conditional was
5743 taken. We backpatch the code, if we find out later that we
5744 have any conditional cleanups that need to be performed. */
5745 rtx dest_right_flag
= NULL_RTX
;
5746 rtx dest_left_flag
= NULL_RTX
;
5748 /* Note that COND_EXPRs whose type is a structure or union
5749 are required to be constructed to contain assignments of
5750 a temporary variable, so that we can evaluate them here
5751 for side effect only. If type is void, we must do likewise. */
5753 /* If an arm of the branch requires a cleanup,
5754 only that cleanup is performed. */
5757 tree binary_op
= 0, unary_op
= 0;
5758 tree old_cleanups
= cleanups_this_call
;
5760 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5761 convert it to our mode, if necessary. */
5762 if (integer_onep (TREE_OPERAND (exp
, 1))
5763 && integer_zerop (TREE_OPERAND (exp
, 2))
5764 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
5768 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5773 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
5774 if (GET_MODE (op0
) == mode
)
5778 target
= gen_reg_rtx (mode
);
5779 convert_move (target
, op0
, unsignedp
);
5783 /* If we are not to produce a result, we have no target. Otherwise,
5784 if a target was specified use it; it will not be used as an
5785 intermediate target unless it is safe. If no target, use a
5790 else if (original_target
5791 && safe_from_p (original_target
, TREE_OPERAND (exp
, 0))
5792 && GET_MODE (original_target
) == mode
5793 && ! (GET_CODE (original_target
) == MEM
5794 && MEM_VOLATILE_P (original_target
)))
5795 temp
= original_target
;
5796 else if (mode
== BLKmode
)
5798 if (TYPE_SIZE (type
) == 0
5799 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5802 temp
= assign_stack_temp (BLKmode
,
5803 (TREE_INT_CST_LOW (TYPE_SIZE (type
))
5804 + BITS_PER_UNIT
- 1)
5805 / BITS_PER_UNIT
, 0);
5806 MEM_IN_STRUCT_P (temp
) = AGGREGATE_TYPE_P (type
);
5809 temp
= gen_reg_rtx (mode
);
5811 /* Check for X ? A + B : A. If we have this, we can copy
5812 A to the output and conditionally add B. Similarly for unary
5813 operations. Don't do this if X has side-effects because
5814 those side effects might affect A or B and the "?" operation is
5815 a sequence point in ANSI. (We test for side effects later.) */
5817 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
5818 && operand_equal_p (TREE_OPERAND (exp
, 2),
5819 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
5820 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
5821 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
5822 && operand_equal_p (TREE_OPERAND (exp
, 1),
5823 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
5824 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
5825 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
5826 && operand_equal_p (TREE_OPERAND (exp
, 2),
5827 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
5828 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
5829 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
5830 && operand_equal_p (TREE_OPERAND (exp
, 1),
5831 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
5832 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
5834 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5835 operation, do this as A + (X != 0). Similarly for other simple
5836 binary operators. */
5837 if (temp
&& singleton
&& binary_op
5838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
5839 && (TREE_CODE (binary_op
) == PLUS_EXPR
5840 || TREE_CODE (binary_op
) == MINUS_EXPR
5841 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
5842 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
5843 && integer_onep (TREE_OPERAND (binary_op
, 1))
5844 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
5847 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
5848 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
5849 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
5852 /* If we had X ? A : A + 1, do this as A + (X == 0).
5854 We have to invert the truth value here and then put it
5855 back later if do_store_flag fails. We cannot simply copy
5856 TREE_OPERAND (exp, 0) to another variable and modify that
5857 because invert_truthvalue can modify the tree pointed to
5859 if (singleton
== TREE_OPERAND (exp
, 1))
5860 TREE_OPERAND (exp
, 0)
5861 = invert_truthvalue (TREE_OPERAND (exp
, 0));
5863 result
= do_store_flag (TREE_OPERAND (exp
, 0),
5864 (safe_from_p (temp
, singleton
)
5866 mode
, BRANCH_COST
<= 1);
5870 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
5871 return expand_binop (mode
, boptab
, op1
, result
, temp
,
5872 unsignedp
, OPTAB_LIB_WIDEN
);
5874 else if (singleton
== TREE_OPERAND (exp
, 1))
5875 TREE_OPERAND (exp
, 0)
5876 = invert_truthvalue (TREE_OPERAND (exp
, 0));
5879 do_pending_stack_adjust ();
5881 op0
= gen_label_rtx ();
5883 flag
= gen_reg_rtx (word_mode
);
5884 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
5888 /* If the target conflicts with the other operand of the
5889 binary op, we can't use it. Also, we can't use the target
5890 if it is a hard register, because evaluating the condition
5891 might clobber it. */
5893 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1)))
5894 || (GET_CODE (temp
) == REG
5895 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
5896 temp
= gen_reg_rtx (mode
);
5897 store_expr (singleton
, temp
, 0);
5900 expand_expr (singleton
,
5901 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
5902 dest_left_flag
= get_last_insn ();
5903 if (singleton
== TREE_OPERAND (exp
, 1))
5904 jumpif (TREE_OPERAND (exp
, 0), op0
);
5906 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
5908 /* Allows cleanups up to here. */
5909 old_cleanups
= cleanups_this_call
;
5910 if (binary_op
&& temp
== 0)
5911 /* Just touch the other operand. */
5912 expand_expr (TREE_OPERAND (binary_op
, 1),
5913 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
5915 store_expr (build (TREE_CODE (binary_op
), type
,
5916 make_tree (type
, temp
),
5917 TREE_OPERAND (binary_op
, 1)),
5920 store_expr (build1 (TREE_CODE (unary_op
), type
,
5921 make_tree (type
, temp
)),
5924 dest_right_flag
= get_last_insn ();
      /* This is now done in jump.c and is better done there because it
	 produces shorter register lifetimes.  */

      /* Check for both possibilities either constants or variables
	 in registers (but not the same as the target!).  If so, can
	 save branches by assigning one, branching, and assigning the
	 other.  */
      else if (temp
	       && GET_MODE (temp) != BLKmode
	       && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
		   || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
			|| TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
		       && DECL_RTL (TREE_OPERAND (exp, 1))
		       && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
		       && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
	       && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
		   || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
			|| TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
		       && DECL_RTL (TREE_OPERAND (exp, 2))
		       && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
		       && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
	{
	  if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	    temp = gen_reg_rtx (mode);
	  store_expr (TREE_OPERAND (exp, 2), temp, 0);
	  dest_left_flag = get_last_insn ();
	  jumpifnot (TREE_OPERAND (exp, 0), op0);

	  /* Allows cleanups up to here.  */
	  old_cleanups = cleanups_this_call;
	  store_expr (TREE_OPERAND (exp, 1), temp, 0);
	  dest_right_flag = get_last_insn ();
	}
      /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	 comparison operator.  If we have one of these cases, set the
	 output to A, branch on A (cse will merge these two references),
	 then set the output to FOO.  */
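      /* Illustrative example (added; not in the original sources): for

	     r = (a != 0 ? a : b);

	 the output register is first set from A, the branch re-tests the
	 same A (cse merges the two references), and only the false arm
	 stores B.  */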
      else if (temp
	       && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
	       && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				   TREE_OPERAND (exp, 1), 0)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
	       && safe_from_p (temp, TREE_OPERAND (exp, 2)))
	{
	  if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	    temp = gen_reg_rtx (mode);
	  store_expr (TREE_OPERAND (exp, 1), temp, 0);
	  dest_left_flag = get_last_insn ();
	  jumpif (TREE_OPERAND (exp, 0), op0);

	  /* Allows cleanups up to here.  */
	  old_cleanups = cleanups_this_call;
	  store_expr (TREE_OPERAND (exp, 2), temp, 0);
	  dest_right_flag = get_last_insn ();
	}
      else if (temp
	       && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
	       && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				   TREE_OPERAND (exp, 2), 0)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
	       && safe_from_p (temp, TREE_OPERAND (exp, 1)))
	{
	  if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	    temp = gen_reg_rtx (mode);
	  store_expr (TREE_OPERAND (exp, 2), temp, 0);
	  dest_left_flag = get_last_insn ();
	  jumpifnot (TREE_OPERAND (exp, 0), op0);

	  /* Allows cleanups up to here.  */
	  old_cleanups = cleanups_this_call;
	  store_expr (TREE_OPERAND (exp, 1), temp, 0);
	  dest_right_flag = get_last_insn ();
	}
      else
	{
	  op1 = gen_label_rtx ();
	  jumpifnot (TREE_OPERAND (exp, 0), op0);

	  /* Allows cleanups up to here.  */
	  old_cleanups = cleanups_this_call;
	  if (temp != 0)
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	  else
	    expand_expr (TREE_OPERAND (exp, 1),
			 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  dest_left_flag = get_last_insn ();

	  /* Handle conditional cleanups, if any.  */
	  left_cleanups = defer_cleanups_to (old_cleanups);

	  emit_jump_insn (gen_jump (op1));
	  emit_barrier ();
	  emit_label (op0);
	  if (temp != 0)
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	  else
	    expand_expr (TREE_OPERAND (exp, 2),
			 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  dest_right_flag = get_last_insn ();

	  /* Handle conditional cleanups, if any.  */
	  right_cleanups = defer_cleanups_to (old_cleanups);
	}
      /* Add back in, any conditional cleanups.  */
      if (left_cleanups || right_cleanups)
	{
	  /* Now that we know that a flag is needed, go back and add in the
	     setting of the flag.  */

	  /* Do the left side flag.  */
	  last = get_last_insn ();
	  /* Flag left cleanups as needed.  */
	  emit_move_insn (flag, const1_rtx);
	  /* ??? deprecated, use sequences instead.  */
	  reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);

	  /* Do the right side flag.  */
	  last = get_last_insn ();
	  /* Flag right cleanups as needed.  */
	  emit_move_insn (flag, const0_rtx);
	  /* ??? deprecated, use sequences instead.  */
	  reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
	  /* convert flag, which is an rtx, into a tree.  */
	  cond = make_node (RTL_EXPR);
	  TREE_TYPE (cond) = integer_type_node;
	  RTL_EXPR_RTL (cond) = flag;
	  RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	  cond = save_expr (cond);

	  if (! left_cleanups)
	    left_cleanups = integer_zero_node;
	  if (! right_cleanups)
	    right_cleanups = integer_zero_node;
	  new_cleanups = build (COND_EXPR, void_type_node,
				truthvalue_conversion (cond),
				left_cleanups, right_cleanups);
	  new_cleanups = fold (new_cleanups);
	  /* Now add in the conditionalized cleanups.  */
	  cleanups_this_call
	    = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	  (*interim_eh_hook) (NULL_TREE);
	}
    case TARGET_EXPR:
      {
	int need_exception_region = 0;
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */
	tree slot = TREE_OPERAND (exp, 0);
	tree exp1;
	rtx temp;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;
	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
		MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;
		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		if (TREE_OPERAND (exp, 2))
		  {
		    cleanups_this_call = tree_cons (NULL_TREE,
						    TREE_OPERAND (exp, 2),
						    cleanups_this_call);
		    need_exception_region = 1;
		  }
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }

	    DECL_RTL (slot) = target;
	  }
	exp1 = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	temp = expand_expr (exp1, target, tmode, modifier);

	if (need_exception_region)
	  (*interim_eh_hook) (NULL_TREE);

	return temp;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL)
	  preexpand_calls (exp);
	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
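	/* Illustrative example (added; not in the original sources): given

	       struct s { unsigned a : 1, b : 1; } x;

	   the statement  x.a |= x.b;  is compiled below as a jump on X.B
	   followed by a store of the constant 1 (for |=) or 0 (for &=)
	   only when the test says so, rather than extracting and
	   re-inserting both bitfields.  */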
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND
					    (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	  }
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore);
    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));
	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* We would like the object in memory.  If it is a constant,
	     we can have it be statically allocated into memory.  For
	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
	     memory and store the value into it.  */
	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT)
	    {
	      /* If this object is in a register, it must not be
		 BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      enum machine_mode inner_mode = TYPE_MODE (inner_type);
	      rtx memloc
		= assign_stack_temp (inner_mode,
				     int_size_in_bytes (inner_type), 1);
	      MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);

	      mark_temp_addr_taken (memloc);
	      emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }
	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}
      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG)
	mark_reg_pointer (op0);

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();
    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Emit bytecode to evaluate the given expression EXP to the stack.  */

void
bc_expand_expr (exp)
     tree exp;
{
  enum tree_code code;
  tree type;
  rtx r;
  struct binary_operator *binoptab;
  struct unary_operator *unoptab;
  struct increment_operator *incroptab;
  struct bc_label *lab, *lab1;
  enum bytecode_opcode opcode;

  code = TREE_CODE (exp);

  switch (code)
    {
    case PARM_DECL:

      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return;
	}

      bc_load_parmaddr (DECL_RTL (exp));
      bc_load_memory (TREE_TYPE (exp), exp);
      return;
    case VAR_DECL:

      if (DECL_RTL (exp) == 0)
	abort ();

      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else
	bc_load_localaddr (DECL_RTL (exp));

      if (TREE_PUBLIC (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      bc_load_memory (TREE_TYPE (exp), exp);
      return;
    case INTEGER_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
      bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
						    ? SImode
						    : TYPE_MODE (TREE_TYPE (exp)))],
			   (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
      return;
    case REAL_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
#endif
      /* FIX THIS: find a better way to pass real_cst's. -bson */
      bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
			   (double) TREE_REAL_CST (exp));
      return;
    case CALL_EXPR:
      {
	/* We build a call description vector describing the type of
	   the return value and of the arguments; this call vector,
	   together with a pointer to a location for the return value
	   and the base of the argument list, is passed to the low
	   level machine dependent call subroutine, which is responsible
	   for putting the arguments wherever real functions expect
	   them, as well as getting the return value back.  */

	tree calldesc = 0, arg;
	int nargs = 0;
	rtx retval;
	/* Push the evaluated args on the evaluation stack in reverse
	   order.  Also make an entry for each arg in the calldesc
	   vector while we're at it.  */

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
	  {
	    ++nargs;
	    bc_expand_expr (TREE_VALUE (arg));

	    calldesc = tree_cons ((tree) 0,
				  size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	    calldesc = tree_cons ((tree) 0,
				  bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	  }

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
	/* Allocate a location for the return value and push its
	   address on the evaluation stack.  Also make an entry
	   at the front of the calldesc for the return value type.  */

	type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
	bc_load_localaddr (retval);

	calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
	calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
	/* Prepend the argument count.  */
	calldesc = tree_cons ((tree) 0,
			      build_int_2 (nargs, 0),
			      calldesc);

	/* Push the address of the call description vector on the stack.  */
	calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
	TREE_TYPE (calldesc) = build_array_type (integer_type_node,
						 build_index_type (build_int_2 (nargs * 2, 0)));
	r = output_constant_def (calldesc);
	bc_load_externaddr (r);
	/* Push the address of the function to be called.  */
	bc_expand_expr (TREE_OPERAND (exp, 0));

	/* Call the function, popping its address and the calldesc vector
	   address off the evaluation stack in the process.  */
	bc_emit_instruction (call);

	/* Pop the arguments off the stack.  */
	bc_adjust_stack (nargs);

	/* Load the return value onto the stack.  */
	bc_load_localaddr (retval);
	bc_load_memory (type, TREE_OPERAND (exp, 0));
      }
      return;
    case SAVE_EXPR:

      if (!SAVE_EXPR_RTL (exp))
	{
	  /* First time around: copy to local variable */
	  SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
						   TYPE_ALIGN (TREE_TYPE (exp)));
	  bc_expand_expr (TREE_OPERAND (exp, 0));
	  bc_emit_instruction (duplicate);

	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      else
	{
	  /* Consecutive reference: use saved copy */
	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      return;
      /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
	 how are they handled instead?  */

      TREE_USED (exp) = 1;
      bc_expand_expr (STMT_BODY (exp));
      return;
    case NOP_EXPR:
    case CONVERT_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
      return;
    case MODIFY_EXPR:

      expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
      return;
    case ADDR_EXPR:

      bc_expand_address (TREE_OPERAND (exp, 0));
      return;
    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;
    case ARRAY_REF:

      bc_expand_expr (bc_canonicalize_array_ref (exp));
      return;
    case COMPONENT_REF:

      bc_expand_component_address (exp);

      /* If we have a bitfield, generate a proper load */
      bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
      return;
    case COMPOUND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_emit_instruction (drop);
      bc_expand_expr (TREE_OPERAND (exp, 1));
      return;
    case COND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();
      bc_emit_bytecode (xjumpifnot);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
      bc_expand_expr (TREE_OPERAND (exp, 1));
      lab1 = bc_get_bytecode_label ();
      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (lab1);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
      bc_emit_bytecode_labeldef (lab);
      bc_expand_expr (TREE_OPERAND (exp, 2));
      bc_emit_bytecode_labeldef (lab1);
      return;
    case TRUTH_ANDIF_EXPR:

      opcode = xjumpifnot;
      goto andorif;

    case TRUTH_ORIF_EXPR:

      opcode = xjumpif;
      goto andorif;

    case PLUS_EXPR:

      binoptab = optab_plus_expr;
      goto binop;

    case MINUS_EXPR:

      binoptab = optab_minus_expr;
      goto binop;

    case MULT_EXPR:

      binoptab = optab_mult_expr;
      goto binop;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:

      binoptab = optab_trunc_div_expr;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

      binoptab = optab_trunc_mod_expr;
      goto binop;

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      abort ();			/* FIXME */

    case RDIV_EXPR:

      binoptab = optab_rdiv_expr;
      goto binop;

    case BIT_AND_EXPR:

      binoptab = optab_bit_and_expr;
      goto binop;

    case BIT_IOR_EXPR:

      binoptab = optab_bit_ior_expr;
      goto binop;

    case BIT_XOR_EXPR:

      binoptab = optab_bit_xor_expr;
      goto binop;

    case LSHIFT_EXPR:

      binoptab = optab_lshift_expr;
      goto binop;

    case RSHIFT_EXPR:

      binoptab = optab_rshift_expr;
      goto binop;

    case TRUTH_AND_EXPR:

      binoptab = optab_truth_and_expr;
      goto binop;

    case TRUTH_OR_EXPR:

      binoptab = optab_truth_or_expr;
      goto binop;

    case LT_EXPR:

      binoptab = optab_lt_expr;
      goto binop;

    case LE_EXPR:

      binoptab = optab_le_expr;
      goto binop;

    case GE_EXPR:

      binoptab = optab_ge_expr;
      goto binop;

    case GT_EXPR:

      binoptab = optab_gt_expr;
      goto binop;

    case EQ_EXPR:

      binoptab = optab_eq_expr;
      goto binop;

    case NE_EXPR:

      binoptab = optab_ne_expr;
      goto binop;

    case NEGATE_EXPR:

      unoptab = optab_negate_expr;
      goto unop;

    case BIT_NOT_EXPR:

      unoptab = optab_bit_not_expr;
      goto unop;

    case TRUTH_NOT_EXPR:

      unoptab = optab_truth_not_expr;
      goto unop;

    case PREDECREMENT_EXPR:

      incroptab = optab_predecrement_expr;
      goto increment;

    case PREINCREMENT_EXPR:

      incroptab = optab_preincrement_expr;
      goto increment;

    case POSTDECREMENT_EXPR:

      incroptab = optab_postdecrement_expr;
      goto increment;

    case POSTINCREMENT_EXPR:

      incroptab = optab_postincrement_expr;
      goto increment;
    case CONSTRUCTOR:

      bc_expand_constructor (exp);
      return;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output.  */
	if (TREE_OPERAND (exp, 2) != 0)
	  TREE_USED (TREE_OPERAND (exp, 2)) = 1;

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	bc_expand_expr (TREE_OPERAND (exp, 1));

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return;
      }
    binop:

      bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
      return;

    unop:

      bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;
    andorif:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();

      bc_emit_instruction (duplicate);
      bc_emit_bytecode (opcode);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_instruction (drop);

      bc_expand_expr (TREE_OPERAND (exp, 1));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
      bc_emit_bytecode_labeldef (lab);
      return;
    increment:

      type = TREE_TYPE (TREE_OPERAND (exp, 0));

      /* Push the quantum.  */
      bc_expand_expr (TREE_OPERAND (exp, 1));

      /* Convert it to the lvalue's type.  */
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);

      /* Push the address of the lvalue */
      bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			      TREE_OPERAND (exp, 0)));

      /* Perform actual increment */
      bc_expand_increment (incroptab, type);
      return;
    }
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;
	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;
	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
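/* Illustrative example (added; not in the original sources): for

       double d;
       char *p = (char *) &d + 4;

   the ADDR_EXPR case above finds DECL_ALIGN (d), typically 64 bits,
   and the PLUS_EXPR case then shrinks max_align to 32 bits, since a
   4-byte offset only preserves 4-byte alignment.  */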
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     run time.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
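/* Illustrative examples (added; not in the original sources):
   for the argument "hello", c_strlen yields size_int (5);
   for "abcd" + 2 (constant offset), it yields size_int (2);
   for "foo\0bar" + i with non-constant I, it yields 0, since the
   internal zero byte makes the result depend on the start offset.  */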
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, Get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx (MEM, Pmode, tem);
#endif
  return tem;
}
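/* Illustrative sketch (added; not in the original sources, not compiled):
   the kind of user code the function above implements.  */
#if 0
void *
callers_return_address ()
{
  /* The argument counts frames to scan up the stack and must be a
     nonnegative integer constant.  */
  return __builtin_return_address (1);
}
#endif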
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
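/* For example, CALLED_AS_BUILT_IN holds for a call written as
   `__builtin_memcpy (d, s, n)' but not for plain `memcpy (d, s, n)';
   the cases below use it to expand explicit __builtin_ calls in-line
   even when not optimizing.  (This note is an added clarification.)  */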
static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}
      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
	  emit_jump_insn (gen_beq (lab1));

	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }

	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  expand_call (exp, target, 0);
	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
      /* __builtin_apply_args returns block of memory allocated on
	 the stack into which is stored the arg pointer, structure
	 value address, static chain, and all the registers that might
	 possibly be used in performing a function call.  The code is
	 moved to the start of the function so the incoming values are
	 saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return apply_args_value;
      }
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}
      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      break;
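      /* Illustrative sketch (added; not in the original sources, not
	 compiled): how the three builtins combine to forward a call.
	 `target_function' and the argument-size constant 64 are made up;
	 see the ??? comment above about choosing that size.  */
#if 0
      void
      forward (void)
      {
	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*) ()) target_function,
					args, 64);
	__builtin_return (result);
      }
#endif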
    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }
      /* __builtin_args_info (N) returns word N of the arg space info
	 for the current function.  The number and meanings of words
	 is controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int i;
	int *word_ptr = (int *) &current_function_args_info;
	tree type, elts, result;

	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, 0);
#endif
      }
      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);
:
7632 tree arg
= TREE_VALUE (arglist
);
7635 return (TREE_CODE_CLASS (TREE_CODE (arg
)) == 'c'
7636 || (TREE_CODE (arg
) == ADDR_EXPR
7637 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
7638 ? const1_rtx
: const0_rtx
);
7641 case BUILT_IN_FRAME_ADDRESS
:
7642 /* The argument must be a nonnegative integer constant.
7643 It counts the number of frames to scan up the stack.
7644 The value is the address of that frame. */
7645 case BUILT_IN_RETURN_ADDRESS
:
7646 /* The argument must be a nonnegative integer constant.
7647 It counts the number of frames to scan up the stack.
7648 The value is the return address saved in that frame. */
7650 /* Warning about missing arg was already issued. */
7652 else if (TREE_CODE (TREE_VALUE (arglist
)) != INTEGER_CST
)
7654 error ("invalid arg to `__builtin_return_address'");
7657 else if (tree_int_cst_sgn (TREE_VALUE (arglist
)) < 0)
7659 error ("invalid arg to `__builtin_return_address'");
7664 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
7665 TREE_INT_CST_LOW (TREE_VALUE (arglist
)),
7666 hard_frame_pointer_rtx
);
7668 /* For __builtin_frame_address, return what we've got. */
7669 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
7672 if (GET_CODE (tem
) != REG
)
7673 tem
= copy_to_reg (tem
);
    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;
:
7710 /* If not optimizing, call the library function. */
7711 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
7715 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7716 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
7720 tree src
= TREE_VALUE (arglist
);
7721 tree len
= c_strlen (src
);
7724 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
7726 rtx result
, src_rtx
, char_rtx
;
7727 enum machine_mode insn_mode
= value_mode
, char_mode
;
7728 enum insn_code icode
;
7730 /* If the length is known, just return it. */
7732 return expand_expr (len
, target
, mode
, 0);
	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.  */

	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;
	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);

	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, ptr_mode,
						 EXPAND_NORMAL));
	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int)icode][2];
	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx (MEM, BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));
	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  tree type;

	  int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_rtx, dest_mem, src_mem;
	  /* If either SRC or DEST is not a pointer type, don't do
	     this operation in-line.  */
	  if (src_align == 0 || dest_align == 0)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	      break;
	    }

	  dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
	  dest_mem = gen_rtx (MEM, BLKmode,
			      memory_address (BLKmode, dest_rtx));
	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (dest) == NOP_EXPR)
	    dest = TREE_OPERAND (dest, 0);
	  type = TREE_TYPE (TREE_TYPE (dest));
	  MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
	  src_mem = gen_rtx (MEM, BLKmode,
			     memory_address (BLKmode,
					     expand_expr (src, NULL_RTX,
							  ptr_mode,
							  EXPAND_SUM)));
	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (src) == NOP_EXPR)
	    src = TREE_OPERAND (src, 0);
	  type = TREE_TYPE (TREE_TYPE (src));
	  MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);

	  /* Copy word part most expediently.  */
	  emit_block_move (dest_mem, src_mem,
			   expand_expr (len, NULL_RTX, VOIDmode, 0),
			   MIN (src_align, dest_align));
	  return force_operand (dest_rtx, NULL_RTX);
	}
      /* These comparison functions need an instruction that returns an actual
	 index.  An ordinary compare that just sets the condition codes
	 is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len, len2;
	len = c_strlen (arg1);
	if (len)
	  len = size_binop (PLUS_EXPR, integer_one_node, len);
	len2 = c_strlen (arg2);
	if (len2)
	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
	/* If we don't have a constant length for the first, use the length
	   of the second, if we know it.  We don't require a constant for
	   this case; some cost analysis could be done if both are available
	   but neither is constant.  For now, assume they're equally cheap.

	   If both strings have constant lengths, use the smaller.  This
	   could arise if optimization results in strcpy being called with
	   two fixed strings, or if the code was machine-generated.  We should
	   add some code to the `memcmp' handler below to deal with such
	   situations, someday.  */
	if (!len || TREE_CODE (len) != INTEGER_CST)
	  {
	    if (len2)
	      len = len2;
	    else if (len == 0)
	      break;
	  }
	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
	  {
	    if (tree_int_cst_lt (len2, len))
	      len = len2;
	  }

	chainon (arglist, build_tree_list (NULL_TREE, len));
      }
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	rtx result;

	int arg1_align
	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	int arg2_align
	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	enum machine_mode insn_mode
	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
	/* If we don't have POINTER_TYPE, call the function.  */
	if (arg1_align == 0 || arg2_align == 0)
	  {
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	    break;
	  }

	/* Make a place to write the result of the instruction.  */
	result = target;
	if (! (result != 0
	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	  result = gen_reg_rtx (insn_mode);
	emit_insn (gen_cmpstrsi (result,
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg1, NULL_RTX,
						       ptr_mode,
						       EXPAND_NORMAL)),
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg2, NULL_RTX,
						       ptr_mode,
						       EXPAND_NORMAL)),
				 expand_expr (len, NULL_RTX, VOIDmode, 0),
				 GEN_INT (MIN (arg1_align, arg2_align))));
	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	else if (target != 0)
	  {
	    convert_move (target, result, 0);
	    return target;
	  }
	else
	  return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif
    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
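/* A standalone sketch (not part of the compiler) of the layout
   arithmetic used above: each saved register lands at the next offset
   that satisfies its alignment, via the same CEIL-style rounding.  */

static int
example_aligned_offset (size, align)
     int size, align;
{
  /* Round SIZE up to the next multiple of ALIGN, as the code above
     does with `size = CEIL (size, align) * align'.  */
  if (size % align != 0)
    size = ((size + align - 1) / align) * align;
  return size;
}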
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx (SET, VOIDmode, mem, reg)
			    : gen_rtx (SET, VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
	/* For reg-stack.c's stack register household.
	   Compare with a similar piece of code in function.c.  */

	emit_insn (gen_rtx (USE, mode, tem));
#endif

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
		   gen_rtx (MEM, BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx (REG, mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx (MEM, FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	push_to_sequence (call_fusage);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
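/* Example usage (GNU C) of the three builtins this machinery
   implements.  A sketch only: `example_target' is a hypothetical
   function, and the 64-byte bound on the argument block is an
   assumption made for the example, not something computed here.  */

extern int example_target ();

static int
example_forwarder ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) example_target, args, 64);
  __builtin_return (result);
}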
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post)
     register tree exp;
     int post;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
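/* What the post-increment path amounts to in plain C: keep a copy of
   the old value, store the updated value, and hand back the copy.  A
   standalone sketch with an int lvalue standing in for OP0.  */

static int
example_postincrement (lvalue, amount)
     int *lvalue, amount;
{
  int old = *lvalue;		/* temp = value = copy_to_reg (op0) */
  *lvalue = old + amount;	/* expand_binop, then emit_move_insn */
  return old;			/* the value from before the increment */
}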
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return;

      /* Do nothing to built-in functions.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
	  || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
	  || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  /* Do nothing if the call returns a variable-sized object.  */
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST)
	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
	 the adjust will be done for it (since it makes jumps).
	 Do it now, so that if this is inside an argument
	 of a function, we don't get the stack adjustment
	 after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
	  preexpand_calls (TREE_OPERAND (exp, i));
      }
}
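/* The traversal pattern of preexpand_calls on a toy expression tree
   (a standalone sketch; the node structure is illustrative): act on
   the interesting node kinds, then recurse into each operand.  */

struct example_node
{
  int is_call;			/* stands in for TREE_CODE checks */
  struct example_node *op[2];	/* stands in for TREE_OPERAND */
};

static void
example_walk (node)
     struct example_node *node;
{
  int i;

  if (node == 0)
    return;
  if (node->is_call)
    {
      /* Process the call here, as the CALL_EXPR case does above.  */
    }
  for (i = 0; i < 2; i++)
    example_walk (node->op[i]);
}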
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
   Returns the cleanups to be performed.  */

static tree
defer_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  tree new_cleanups = NULL_TREE;
  tree cleanups = cleanups_this_call;
  tree last = NULL_TREE;

  while (cleanups_this_call != old_cleanups)
    {
      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
      last = cleanups_this_call;
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }

  if (last)
    {
      /* Remove the list from the chain of cleanups.  */
      TREE_CHAIN (last) = NULL_TREE;

      /* reverse them so that we can build them in the right order.  */
      cleanups = nreverse (cleanups);

      while (cleanups)
	{
	  if (new_cleanups)
	    new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
				  TREE_VALUE (cleanups), new_cleanups);
	  else
	    new_cleanups = TREE_VALUE (cleanups);

	  cleanups = TREE_CHAIN (cleanups);
	}
    }

  return new_cleanups;
}

/* Expand all cleanups up to OLD_CLEANUPS.
   Needed here, and also for language-dependent calls.  */

void
expand_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  while (cleanups_this_call != old_cleanups)
    {
      (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
      expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }
}
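/* The run-time flag technique used for conditionalized cleanups (see
   the TRUTH_ANDIF_EXPR handling in do_jump below), as plain C.  A
   standalone sketch; the function and names are illustrative.  */

static void
example_conditional_cleanup (cond)
     int cond;
{
  int flag = 0;			/* flag cleanups as not needed */

  if (cond)
    {
      flag = 1;			/* flag cleanups as needed */
      /* ... the code whose cleanup is conditional ... */
    }
  if (flag)
    {
      /* ... run the cleanup only if its path was taken ... */
    }
}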
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
				   TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
			    NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      {
	rtx seq1, seq2;
	tree cleanups, old_cleanups;

	if (if_false_label == 0)
	  if_false_label = drop_through_label = gen_label_rtx ();
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
	seq1 = get_insns ();
	end_sequence ();

	old_cleanups = cleanups_this_call;
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	seq2 = get_insns ();
	end_sequence ();

	cleanups = defer_cleanups_to (old_cleanups);
	if (cleanups)
	  {
	    rtx flag = gen_reg_rtx (word_mode);
	    tree new_cleanups;
	    tree cond;

	    /* Flag cleanups as not needed.  */
	    emit_move_insn (flag, const0_rtx);
	    emit_insns (seq1);

	    /* Flag cleanups as needed.  */
	    emit_move_insn (flag, const1_rtx);
	    emit_insns (seq2);

	    /* convert flag, which is an rtx, into a tree.  */
	    cond = make_node (RTL_EXPR);
	    TREE_TYPE (cond) = integer_type_node;
	    RTL_EXPR_RTL (cond) = flag;
	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	    cond = save_expr (cond);

	    new_cleanups = build (COND_EXPR, void_type_node,
				  truthvalue_conversion (cond),
				  cleanups, integer_zero_node);
	    new_cleanups = fold (new_cleanups);

	    /* Now add in the conditionalized cleanups.  */
	    cleanups_this_call
	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }
	else
	  {
	    emit_insns (seq1);
	    emit_insns (seq2);
	  }
      }
      break;

    case TRUTH_ORIF_EXPR:
      {
	rtx seq1, seq2;
	tree cleanups, old_cleanups;

	if (if_true_label == 0)
	  if_true_label = drop_through_label = gen_label_rtx ();
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
	seq1 = get_insns ();
	end_sequence ();

	old_cleanups = cleanups_this_call;
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	seq2 = get_insns ();
	end_sequence ();

	cleanups = defer_cleanups_to (old_cleanups);
	if (cleanups)
	  {
	    rtx flag = gen_reg_rtx (word_mode);
	    tree new_cleanups;
	    tree cond;

	    /* Flag cleanups as not needed.  */
	    emit_move_insn (flag, const0_rtx);
	    emit_insns (seq1);

	    /* Flag cleanups as needed.  */
	    emit_move_insn (flag, const1_rtx);
	    emit_insns (seq2);

	    /* convert flag, which is an rtx, into a tree.  */
	    cond = make_node (RTL_EXPR);
	    TREE_TYPE (cond) = integer_type_node;
	    RTL_EXPR_RTL (cond) = flag;
	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	    cond = save_expr (cond);

	    new_cleanups = build (COND_EXPR, void_type_node,
				  truthvalue_conversion (cond),
				  cleanups, integer_zero_node);
	    new_cleanups = fold (new_cleanups);

	    /* Now add in the conditionalized cleanups.  */
	    cleanups_this_call
	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }
	else
	  {
	    emit_insns (seq1);
	    emit_insns (seq2);
	  }
      }
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();
	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);
	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	}
      break;

    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  comparison = compare (exp, EQ, EQ);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  comparison = compare (exp, NE, NE);
	break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      else
	abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
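/* The control flow do_jump produces for `a && b', written with
   source-level gotos.  A standalone sketch: each operand gets its own
   conditional jump, so `b' is never evaluated when `a' is false.  */

static int
example_andif (a, b)
     int a, b;
{
  if (! a)
    goto false_label;
  if (! b)
    goto false_label;
  return 1;			/* both operands were nonzero */

 false_label:
  return 0;
}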
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
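/* A standalone model of the word-at-a-time comparison above, with two
   32-bit halves standing in for a value too wide for one compare insn:
   decide on the high-order words first, and look at the lower words
   only when the higher words are equal.  */

static int
example_wide_greater (hi0, lo0, hi1, lo1)
     unsigned long hi0, lo0, hi1, lo1;
{
  if (hi0 > hi1)
    return 1;			/* decided by the high-order word */
  if (hi0 != hi1)
    return 0;			/* hi0 < hi1: also decided */
  return lo0 > lo1;		/* lower word compared as unsigned */
}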
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
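/* The operand-swap rule used above, checked in plain C: putting the
   constant second requires swapping the comparison code (LT becomes
   GT), not inverting it (which would give GE).  A standalone sketch.  */

static int
example_swapped_compare (a, b)
     int a, b;
{
  /* `a < b' and `b > a' agree for all inputs; `b >= a' does not.  */
  return (a < b) == (b > a);
}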
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));
      int ops_unsignedp;
      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
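/* The single-bit test above, as plain C: `(x & (1 << n)) != 0' turns
   into a shift and a mask, and the EQ form adds an XOR with 1.  A
   standalone sketch for a 32-bit unsigned value.  */

static unsigned int
example_bit_test_ne (x, n)
     unsigned int x;
     int n;
{
  return (x >> n) & 1;		/* 1 iff bit N of X is set */
}

static unsigned int
example_bit_test_eq (x, n)
     unsigned int x;
     int n;
{
  return ((x >> n) & 1) ^ 1;	/* the XOR with 1 inverts the flag */
}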
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory().  */
    if (TYPE_MODE (type) == BLKmode
        || TYPE_MODE (type) == VOIDmode)
      abort ();
    else
      opcode = mode_to_load_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
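
/* For example, loading a plain `int' (SImode) emits the single opcode
   mode_to_load_map[(int) SImode]; a signed int bit-field is fetched
   with sxloadBI, an unsigned one with zxloadBI.  */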
/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = sstoreBI;
    else
      abort ();
  else
    if (TYPE_MODE (type) == BLKmode)
      {
        /* Copy structure.  This expands to a block copy instruction, storeBLK.
           In addition to the arguments expected by the other store instructions,
           it also expects a type size (SImode) on top of the stack, which is the
           structure size in size units (usually bytes).  The two first arguments
           are already on the stack; so we just put the size on level 1.  For some
           other languages, the size may be variable, this is why we don't encode
           it as a storeBLK literal, but rather treat it as a full-fledged
           expression.  */

        bc_expand_expr (TYPE_SIZE (type));
        opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
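
/* For a 12-byte structure, say, the stack seen by storeBLK is thus,
   from the top: the size (12), the destination address, and the
   source beneath them.  */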
/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  /* Normalize size and alignment  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
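
/* For example, if local_vars_size is 5 and a local needs 4-byte
   alignment, 5 & 3 is nonzero, so 3 bytes of padding are added and
   the local is placed at offset 8.  */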
/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}
/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Return a copy of string S, allocated with xmalloc.  */

static char *
bc_xstrdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);

  strcpy (new, s);
  return new;
}
/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}
/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}
/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}
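
/* For `int a[10]', for example, a[i] becomes the INDIRECT_REF of a
   PLUS_EXPR adding i * 4 (the element size in bytes) to the address
   of a.  */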
/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
        if (TREE_CODE (tem) == ARRAY_REF
            && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
            && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
          bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
        else
          break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else
    if (SIval = bitpos / BITS_PER_UNIT)
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}
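
/* For example, for `s.a.b' where member a sits at bit offset 32 of s
   and b at bit offset 0 of a, bitpos accumulates to 32, so a single
   addconstPSI 4 advances the pointer by 4 bytes.  */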
/* Emit code to push two SI constants */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      return (exp);

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
      else
        if (DECL_EXTERNAL (exp))
          bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                                 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
        else
          bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      return (exp);

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
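
/* Expanding the address of a global variable thus emits a single
   constP referencing its pointer-table entry, while a local emits
   localP with its frame offset and a parameter emits argP.  */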
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
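
/* For a 32-bit `int' aligned to 32 bits, for example, the code is
   (int) SImode | 32 << 8.  */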
/* Generate constructor label */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (duplicate);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, 0,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else
    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
        register tree elt;
        register int i;
        tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
        int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
        int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
        tree elttype = TREE_TYPE (TREE_TYPE (constr));

        /* If the constructor has fewer fields than the structure,
           clear the whole structure first.  */

        if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
          {
            bc_emit_instruction (duplicate);
            bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
            bc_emit_instruction (clearBLK);
          }

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */

        for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            register enum machine_mode mode;
            int bitsize;
            int bitpos;
            int unsignedp;

            mode = TYPE_MODE (elttype);
            bitsize = GET_MODE_BITSIZE (mode);
            unsignedp = TREE_UNSIGNED (elttype);

            bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                      /* * TYPE_SIZE_UNIT (elttype) */ );

            bc_store_field (elt, bitsize, bitpos, mode,
                            TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                            /* The alignment of TARGET is
                               at least what its type requires.  */
                            VOIDmode, 0,
                            TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (constr)));
          }
      }
}
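
/* For example, for `struct { int x, y; } s = { n };' with non-constant
   N, the emitted sequence is roughly

        constP   <pointer-table entry for the static block>
        duplicate
        constSI  8
        clearBLK                ; one initializer, two fields
        ...                     ; store sequence for field x

   leaving the original pointer on top of the stack.  */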
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}
/* Store SI/SU in bitfield */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}
/* Load SI/SU from bitfield */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
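
/* bc_adjust_stack (2), for example, emits two drop instructions
   (case 2 falls through to case 1); larger counts use a single
   adjstackSI.  */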