1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls
= 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust
;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop
;
97 /* When temporaries are created by TARGET_EXPRs, they are created at
98 this level of temp_slot_level, so that they can remain allocated
99 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
100 of TARGET_EXPRs. */
101 int target_temp_slot_level
;
103 /* Nonzero means __builtin_saveregs has already been done in this function.
104 The value is the pseudoreg containing the value __builtin_saveregs
105 returned. */
106 static rtx saveregs_value
;
108 /* Similarly for __builtin_apply_args. */
109 static rtx apply_args_value
;
111 /* This structure is used by move_by_pieces to describe the move to
114 struct move_by_pieces
124 int explicit_inc_from
;
131 /* This structure is used by clear_by_pieces to describe the clear to
134 struct clear_by_pieces
146 /* Used to generate bytecodes: keep track of size of local variables,
147 as well as depth of arithmetic stack. (Notice that variables are
148 stored on the machine's stack, not the arithmetic stack.) */
150 extern int local_vars_size
;
151 extern int stack_depth
;
152 extern int max_stack_depth
;
153 extern struct obstack permanent_obstack
;
154 extern rtx arg_pointer_save_area
;
156 static rtx enqueue_insn
PROTO((rtx
, rtx
));
157 static int queued_subexp_p
PROTO((rtx
));
158 static void init_queue
PROTO((void));
159 static void move_by_pieces
PROTO((rtx
, rtx
, int, int));
160 static int move_by_pieces_ninsns
PROTO((unsigned int, int));
161 static void move_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
162 struct move_by_pieces
*));
163 static void clear_by_pieces
PROTO((rtx
, int, int));
164 static void clear_by_pieces_1
PROTO((rtx (*) (), enum machine_mode
,
165 struct clear_by_pieces
*));
166 static int is_zeros_p
PROTO((tree
));
167 static int mostly_zeros_p
PROTO((tree
));
168 static void store_constructor
PROTO((tree
, rtx
, int));
169 static rtx store_field
PROTO((rtx
, int, int, enum machine_mode
, tree
,
170 enum machine_mode
, int, int, int));
171 static int get_inner_unaligned_p
PROTO((tree
));
172 static tree save_noncopied_parts
PROTO((tree
, tree
));
173 static tree init_noncopied_parts
PROTO((tree
, tree
));
174 static int safe_from_p
PROTO((rtx
, tree
));
175 static int fixed_type_p
PROTO((tree
));
176 static rtx var_rtx
PROTO((tree
));
177 static int get_pointer_alignment
PROTO((tree
, unsigned));
178 static tree string_constant
PROTO((tree
, tree
*));
179 static tree c_strlen
PROTO((tree
));
180 static rtx expand_builtin
PROTO((tree
, rtx
, rtx
,
181 enum machine_mode
, int));
182 static int apply_args_size
PROTO((void));
183 static int apply_result_size
PROTO((void));
184 static rtx result_vector
PROTO((int, rtx
));
185 static rtx expand_builtin_apply_args
PROTO((void));
186 static rtx expand_builtin_apply
PROTO((rtx
, rtx
, rtx
));
187 static void expand_builtin_return
PROTO((rtx
));
188 static rtx expand_increment
PROTO((tree
, int, int));
189 void bc_expand_increment
PROTO((struct increment_operator
*, tree
));
190 rtx bc_allocate_local
PROTO((int, int));
191 void bc_store_memory
PROTO((tree
, tree
));
192 tree bc_expand_component_address
PROTO((tree
));
193 tree bc_expand_address
PROTO((tree
));
194 void bc_expand_constructor
PROTO((tree
));
195 void bc_adjust_stack
PROTO((int));
196 tree bc_canonicalize_array_ref
PROTO((tree
));
197 void bc_load_memory
PROTO((tree
, tree
));
198 void bc_load_externaddr
PROTO((rtx
));
199 void bc_load_externaddr_id
PROTO((tree
, int));
200 void bc_load_localaddr
PROTO((rtx
));
201 void bc_load_parmaddr
PROTO((rtx
));
202 static void preexpand_calls
PROTO((tree
));
203 static void do_jump_by_parts_greater
PROTO((tree
, int, rtx
, rtx
));
204 void do_jump_by_parts_greater_rtx
PROTO((enum machine_mode
, int, rtx
, rtx
, rtx
, rtx
));
205 static void do_jump_by_parts_equality
PROTO((tree
, rtx
, rtx
));
206 static void do_jump_by_parts_equality_rtx
PROTO((rtx
, rtx
, rtx
));
207 static void do_jump_for_compare
PROTO((rtx
, rtx
, rtx
));
208 static rtx compare
PROTO((tree
, enum rtx_code
, enum rtx_code
));
209 static rtx do_store_flag
PROTO((tree
, rtx
, enum machine_mode
, int));
210 extern tree truthvalue_conversion
PROTO((tree
));
212 /* Record for each mode whether we can move a register directly to or
213 from an object of that mode in memory. If we can't, we won't try
214 to use that mode directly when accessing a field of that mode. */
216 static char direct_load
[NUM_MACHINE_MODES
];
217 static char direct_store
[NUM_MACHINE_MODES
];
219 /* MOVE_RATIO is the number of move instructions that is better than
223 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
226 /* A value of around 6 would minimize code size; infinity would minimize
228 #define MOVE_RATIO 15
232 /* This array records the insn_code of insns to perform block moves. */
233 enum insn_code movstr_optab
[NUM_MACHINE_MODES
];
235 /* This array records the insn_code of insns to perform block clears. */
236 enum insn_code clrstr_optab
[NUM_MACHINE_MODES
];
238 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
240 #ifndef SLOW_UNALIGNED_ACCESS
241 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
244 /* Register mappings for target machines without register windows. */
245 #ifndef INCOMING_REGNO
246 #define INCOMING_REGNO(OUT) (OUT)
248 #ifndef OUTGOING_REGNO
249 #define OUTGOING_REGNO(IN) (IN)
252 /* Maps used to convert modes to const, load, and store bytecodes. */
253 enum bytecode_opcode mode_to_const_map
[MAX_MACHINE_MODE
];
254 enum bytecode_opcode mode_to_load_map
[MAX_MACHINE_MODE
];
255 enum bytecode_opcode mode_to_store_map
[MAX_MACHINE_MODE
];
257 /* Initialize maps used to convert modes to const, load, and store
261 bc_init_mode_to_opcode_maps ()
265 for (mode
= 0; mode
< (int) MAX_MACHINE_MODE
; mode
++)
266 mode_to_const_map
[mode
]
267 = mode_to_load_map
[mode
]
268 = mode_to_store_map
[mode
] = neverneverland
;
270 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
271 mode_to_const_map[(int) SYM] = CONST; \
272 mode_to_load_map[(int) SYM] = LOAD; \
273 mode_to_store_map[(int) SYM] = STORE;
275 #include "modemap.def"
279 /* This is run once per compilation to set up which modes can be used
280 directly in memory and to initialize the block move optab. */
286 enum machine_mode mode
;
287 /* Try indexing by frame ptr and try by stack ptr.
288 It is known that on the Convex the stack ptr isn't a valid index.
289 With luck, one or the other is valid on any machine. */
290 rtx mem
= gen_rtx (MEM
, VOIDmode
, stack_pointer_rtx
);
291 rtx mem1
= gen_rtx (MEM
, VOIDmode
, frame_pointer_rtx
);
294 insn
= emit_insn (gen_rtx (SET
, 0, 0));
295 pat
= PATTERN (insn
);
297 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
298 mode
= (enum machine_mode
) ((int) mode
+ 1))
304 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
305 PUT_MODE (mem
, mode
);
306 PUT_MODE (mem1
, mode
);
308 /* See if there is some register that can be used in this mode and
309 directly loaded or stored from memory. */
311 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
312 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
313 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
316 if (! HARD_REGNO_MODE_OK (regno
, mode
))
319 reg
= gen_rtx (REG
, mode
, regno
);
322 SET_DEST (pat
) = reg
;
323 if (recog (pat
, insn
, &num_clobbers
) >= 0)
324 direct_load
[(int) mode
] = 1;
326 SET_SRC (pat
) = mem1
;
327 SET_DEST (pat
) = reg
;
328 if (recog (pat
, insn
, &num_clobbers
) >= 0)
329 direct_load
[(int) mode
] = 1;
332 SET_DEST (pat
) = mem
;
333 if (recog (pat
, insn
, &num_clobbers
) >= 0)
334 direct_store
[(int) mode
] = 1;
337 SET_DEST (pat
) = mem1
;
338 if (recog (pat
, insn
, &num_clobbers
) >= 0)
339 direct_store
[(int) mode
] = 1;
346 /* This is run at the start of compiling a function. */
353 pending_stack_adjust
= 0;
354 inhibit_defer_pop
= 0;
356 apply_args_value
= 0;
360 /* Save all variables describing the current status into the structure *P.
361 This is used before starting a nested function. */
367 /* Instead of saving the postincrement queue, empty it. */
370 p
->pending_stack_adjust
= pending_stack_adjust
;
371 p
->inhibit_defer_pop
= inhibit_defer_pop
;
372 p
->saveregs_value
= saveregs_value
;
373 p
->apply_args_value
= apply_args_value
;
374 p
->forced_labels
= forced_labels
;
376 pending_stack_adjust
= 0;
377 inhibit_defer_pop
= 0;
379 apply_args_value
= 0;
383 /* Restore all variables describing the current status from the structure *P.
384 This is used after a nested function. */
387 restore_expr_status (p
)
390 pending_stack_adjust
= p
->pending_stack_adjust
;
391 inhibit_defer_pop
= p
->inhibit_defer_pop
;
392 saveregs_value
= p
->saveregs_value
;
393 apply_args_value
= p
->apply_args_value
;
394 forced_labels
= p
->forced_labels
;
397 /* Manage the queue of increment instructions to be output
398 for POSTINCREMENT_EXPR expressions, etc. */
400 static rtx pending_chain
;
402 /* Queue up to increment (or change) VAR later. BODY says how:
403 BODY should be the same thing you would pass to emit_insn
404 to increment right away. It will go to emit_insn later on.
406 The value is a QUEUED expression to be used in place of VAR
407 where you want to guarantee the pre-incrementation value of VAR. */
410 enqueue_insn (var
, body
)
413 pending_chain
= gen_rtx (QUEUED
, GET_MODE (var
),
414 var
, NULL_RTX
, NULL_RTX
, body
, pending_chain
);
415 return pending_chain
;
418 /* Use protect_from_queue to convert a QUEUED expression
419 into something that you can put immediately into an instruction.
420 If the queued incrementation has not happened yet,
421 protect_from_queue returns the variable itself.
422 If the incrementation has happened, protect_from_queue returns a temp
423 that contains a copy of the old value of the variable.
425 Any time an rtx which might possibly be a QUEUED is to be put
426 into an instruction, it must be passed through protect_from_queue first.
427 QUEUED expressions are not meaningful in instructions.
429 Do not pass a value through protect_from_queue and then hold
430 on to it for a while before putting it in an instruction!
431 If the queue is flushed in between, incorrect code will result. */
434 protect_from_queue (x
, modify
)
438 register RTX_CODE code
= GET_CODE (x
);
440 #if 0 /* A QUEUED can hang around after the queue is forced out. */
441 /* Shortcut for most common case. */
442 if (pending_chain
== 0)
448 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
449 use of autoincrement. Make a copy of the contents of the memory
450 location rather than a copy of the address, but not if the value is
451 of mode BLKmode. Don't modify X in place since it might be
453 if (code
== MEM
&& GET_MODE (x
) != BLKmode
454 && GET_CODE (XEXP (x
, 0)) == QUEUED
&& !modify
)
456 register rtx y
= XEXP (x
, 0);
457 register rtx
new = gen_rtx (MEM
, GET_MODE (x
), QUEUED_VAR (y
));
459 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
460 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
461 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
465 register rtx temp
= gen_reg_rtx (GET_MODE (new));
466 emit_insn_before (gen_move_insn (temp
, new),
472 /* Otherwise, recursively protect the subexpressions of all
473 the kinds of rtx's that can contain a QUEUED. */
476 rtx tem
= protect_from_queue (XEXP (x
, 0), 0);
477 if (tem
!= XEXP (x
, 0))
483 else if (code
== PLUS
|| code
== MULT
)
485 rtx new0
= protect_from_queue (XEXP (x
, 0), 0);
486 rtx new1
= protect_from_queue (XEXP (x
, 1), 0);
487 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
496 /* If the increment has not happened, use the variable itself. */
497 if (QUEUED_INSN (x
) == 0)
498 return QUEUED_VAR (x
);
499 /* If the increment has happened and a pre-increment copy exists,
501 if (QUEUED_COPY (x
) != 0)
502 return QUEUED_COPY (x
);
503 /* The increment has happened but we haven't set up a pre-increment copy.
504 Set one up now, and use it. */
505 QUEUED_COPY (x
) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x
)));
506 emit_insn_before (gen_move_insn (QUEUED_COPY (x
), QUEUED_VAR (x
)),
508 return QUEUED_COPY (x
);
511 /* Return nonzero if X contains a QUEUED expression:
512 if it contains anything that will be altered by a queued increment.
513 We handle only combinations of MEM, PLUS, MINUS and MULT operators
514 since memory addresses generally contain only those. */
520 register enum rtx_code code
= GET_CODE (x
);
526 return queued_subexp_p (XEXP (x
, 0));
530 return queued_subexp_p (XEXP (x
, 0))
531 || queued_subexp_p (XEXP (x
, 1));
536 /* Perform all the pending incrementations. */
542 while (p
= pending_chain
)
544 QUEUED_INSN (p
) = emit_insn (QUEUED_BODY (p
));
545 pending_chain
= QUEUED_NEXT (p
);
556 /* Copy data from FROM to TO, where the machine modes are not the same.
557 Both modes may be integer, or both may be floating.
558 UNSIGNEDP should be nonzero if FROM is an unsigned type.
559 This causes zero-extension instead of sign-extension. */
562 convert_move (to
, from
, unsignedp
)
563 register rtx to
, from
;
566 enum machine_mode to_mode
= GET_MODE (to
);
567 enum machine_mode from_mode
= GET_MODE (from
);
568 int to_real
= GET_MODE_CLASS (to_mode
) == MODE_FLOAT
;
569 int from_real
= GET_MODE_CLASS (from_mode
) == MODE_FLOAT
;
573 /* rtx code for making an equivalent value. */
574 enum rtx_code equiv_code
= (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
);
576 to
= protect_from_queue (to
, 1);
577 from
= protect_from_queue (from
, 0);
579 if (to_real
!= from_real
)
582 /* If FROM is a SUBREG that indicates that we have already done at least
583 the required extension, strip it. We don't handle such SUBREGs as
586 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
587 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
588 >= GET_MODE_SIZE (to_mode
))
589 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
590 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
592 if (GET_CODE (to
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (to
))
595 if (to_mode
== from_mode
596 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
598 emit_move_insn (to
, from
);
606 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
))
608 /* Try converting directly if the insn is supported. */
609 if ((code
= can_extend_p (to_mode
, from_mode
, 0))
612 emit_unop_insn (code
, to
, from
, UNKNOWN
);
617 #ifdef HAVE_trunchfqf2
618 if (HAVE_trunchfqf2
&& from_mode
== HFmode
&& to_mode
== QFmode
)
620 emit_unop_insn (CODE_FOR_trunchfqf2
, to
, from
, UNKNOWN
);
624 #ifdef HAVE_truncsfqf2
625 if (HAVE_truncsfqf2
&& from_mode
== SFmode
&& to_mode
== QFmode
)
627 emit_unop_insn (CODE_FOR_truncsfqf2
, to
, from
, UNKNOWN
);
631 #ifdef HAVE_truncdfqf2
632 if (HAVE_truncdfqf2
&& from_mode
== DFmode
&& to_mode
== QFmode
)
634 emit_unop_insn (CODE_FOR_truncdfqf2
, to
, from
, UNKNOWN
);
638 #ifdef HAVE_truncxfqf2
639 if (HAVE_truncxfqf2
&& from_mode
== XFmode
&& to_mode
== QFmode
)
641 emit_unop_insn (CODE_FOR_truncxfqf2
, to
, from
, UNKNOWN
);
645 #ifdef HAVE_trunctfqf2
646 if (HAVE_trunctfqf2
&& from_mode
== TFmode
&& to_mode
== QFmode
)
648 emit_unop_insn (CODE_FOR_trunctfqf2
, to
, from
, UNKNOWN
);
653 #ifdef HAVE_trunctqfhf2
654 if (HAVE_trunctqfhf2
&& from_mode
== TQFmode
&& to_mode
== HFmode
)
656 emit_unop_insn (CODE_FOR_trunctqfhf2
, to
, from
, UNKNOWN
);
660 #ifdef HAVE_truncsfhf2
661 if (HAVE_truncsfhf2
&& from_mode
== SFmode
&& to_mode
== HFmode
)
663 emit_unop_insn (CODE_FOR_truncsfhf2
, to
, from
, UNKNOWN
);
667 #ifdef HAVE_truncdfhf2
668 if (HAVE_truncdfhf2
&& from_mode
== DFmode
&& to_mode
== HFmode
)
670 emit_unop_insn (CODE_FOR_truncdfhf2
, to
, from
, UNKNOWN
);
674 #ifdef HAVE_truncxfhf2
675 if (HAVE_truncxfhf2
&& from_mode
== XFmode
&& to_mode
== HFmode
)
677 emit_unop_insn (CODE_FOR_truncxfhf2
, to
, from
, UNKNOWN
);
681 #ifdef HAVE_trunctfhf2
682 if (HAVE_trunctfhf2
&& from_mode
== TFmode
&& to_mode
== HFmode
)
684 emit_unop_insn (CODE_FOR_trunctfhf2
, to
, from
, UNKNOWN
);
689 #ifdef HAVE_truncsftqf2
690 if (HAVE_truncsftqf2
&& from_mode
== SFmode
&& to_mode
== TQFmode
)
692 emit_unop_insn (CODE_FOR_truncsftqf2
, to
, from
, UNKNOWN
);
696 #ifdef HAVE_truncdftqf2
697 if (HAVE_truncdftqf2
&& from_mode
== DFmode
&& to_mode
== TQFmode
)
699 emit_unop_insn (CODE_FOR_truncdftqf2
, to
, from
, UNKNOWN
);
703 #ifdef HAVE_truncxftqf2
704 if (HAVE_truncxftqf2
&& from_mode
== XFmode
&& to_mode
== TQFmode
)
706 emit_unop_insn (CODE_FOR_truncxftqf2
, to
, from
, UNKNOWN
);
710 #ifdef HAVE_trunctftqf2
711 if (HAVE_trunctftqf2
&& from_mode
== TFmode
&& to_mode
== TQFmode
)
713 emit_unop_insn (CODE_FOR_trunctftqf2
, to
, from
, UNKNOWN
);
718 #ifdef HAVE_truncdfsf2
719 if (HAVE_truncdfsf2
&& from_mode
== DFmode
&& to_mode
== SFmode
)
721 emit_unop_insn (CODE_FOR_truncdfsf2
, to
, from
, UNKNOWN
);
725 #ifdef HAVE_truncxfsf2
726 if (HAVE_truncxfsf2
&& from_mode
== XFmode
&& to_mode
== SFmode
)
728 emit_unop_insn (CODE_FOR_truncxfsf2
, to
, from
, UNKNOWN
);
732 #ifdef HAVE_trunctfsf2
733 if (HAVE_trunctfsf2
&& from_mode
== TFmode
&& to_mode
== SFmode
)
735 emit_unop_insn (CODE_FOR_trunctfsf2
, to
, from
, UNKNOWN
);
739 #ifdef HAVE_truncxfdf2
740 if (HAVE_truncxfdf2
&& from_mode
== XFmode
&& to_mode
== DFmode
)
742 emit_unop_insn (CODE_FOR_truncxfdf2
, to
, from
, UNKNOWN
);
746 #ifdef HAVE_trunctfdf2
747 if (HAVE_trunctfdf2
&& from_mode
== TFmode
&& to_mode
== DFmode
)
749 emit_unop_insn (CODE_FOR_trunctfdf2
, to
, from
, UNKNOWN
);
761 libcall
= extendsfdf2_libfunc
;
765 libcall
= extendsfxf2_libfunc
;
769 libcall
= extendsftf2_libfunc
;
778 libcall
= truncdfsf2_libfunc
;
782 libcall
= extenddfxf2_libfunc
;
786 libcall
= extenddftf2_libfunc
;
795 libcall
= truncxfsf2_libfunc
;
799 libcall
= truncxfdf2_libfunc
;
808 libcall
= trunctfsf2_libfunc
;
812 libcall
= trunctfdf2_libfunc
;
818 if (libcall
== (rtx
) 0)
819 /* This conversion is not implemented yet. */
822 value
= emit_library_call_value (libcall
, NULL_RTX
, 1, to_mode
,
824 emit_move_insn (to
, value
);
828 /* Now both modes are integers. */
830 /* Handle expanding beyond a word. */
831 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
832 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
839 enum machine_mode lowpart_mode
;
840 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
842 /* Try converting directly if the insn is supported. */
843 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
846 /* If FROM is a SUBREG, put it into a register. Do this
847 so that we always generate the same set of insns for
848 better cse'ing; if an intermediate assignment occurred,
849 we won't be doing the operation directly on the SUBREG. */
850 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
851 from
= force_reg (from_mode
, from
);
852 emit_unop_insn (code
, to
, from
, equiv_code
);
855 /* Next, try converting via full word. */
856 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
857 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
858 != CODE_FOR_nothing
))
860 if (GET_CODE (to
) == REG
)
861 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, to
));
862 convert_move (gen_lowpart (word_mode
, to
), from
, unsignedp
);
863 emit_unop_insn (code
, to
,
864 gen_lowpart (word_mode
, to
), equiv_code
);
868 /* No special multiword conversion insn; do it by hand. */
871 /* Since we will turn this into a no conflict block, we must ensure
872 that the source does not overlap the target. */
874 if (reg_overlap_mentioned_p (to
, from
))
875 from
= force_reg (from_mode
, from
);
877 /* Get a copy of FROM widened to a word, if necessary. */
878 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
879 lowpart_mode
= word_mode
;
881 lowpart_mode
= from_mode
;
883 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
885 lowpart
= gen_lowpart (lowpart_mode
, to
);
886 emit_move_insn (lowpart
, lowfrom
);
888 /* Compute the value to put in each remaining word. */
890 fill_value
= const0_rtx
;
895 && insn_operand_mode
[(int) CODE_FOR_slt
][0] == word_mode
896 && STORE_FLAG_VALUE
== -1)
898 emit_cmp_insn (lowfrom
, const0_rtx
, NE
, NULL_RTX
,
900 fill_value
= gen_reg_rtx (word_mode
);
901 emit_insn (gen_slt (fill_value
));
907 = expand_shift (RSHIFT_EXPR
, lowpart_mode
, lowfrom
,
908 size_int (GET_MODE_BITSIZE (lowpart_mode
) - 1),
910 fill_value
= convert_to_mode (word_mode
, fill_value
, 1);
914 /* Fill the remaining words. */
915 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
917 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
918 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
923 if (fill_value
!= subword
)
924 emit_move_insn (subword
, fill_value
);
927 insns
= get_insns ();
930 emit_no_conflict_block (insns
, to
, from
, NULL_RTX
,
931 gen_rtx (equiv_code
, to_mode
, copy_rtx (from
)));
935 /* Truncating multi-word to a word or less. */
936 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
937 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
939 if (!((GET_CODE (from
) == MEM
940 && ! MEM_VOLATILE_P (from
)
941 && direct_load
[(int) to_mode
]
942 && ! mode_dependent_address_p (XEXP (from
, 0)))
943 || GET_CODE (from
) == REG
944 || GET_CODE (from
) == SUBREG
))
945 from
= force_reg (from_mode
, from
);
946 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
950 /* Handle pointer conversion */ /* SPEE 900220 */
951 if (to_mode
== PSImode
)
953 if (from_mode
!= SImode
)
954 from
= convert_to_mode (SImode
, from
, unsignedp
);
956 #ifdef HAVE_truncsipsi2
957 if (HAVE_truncsipsi2
)
959 emit_unop_insn (CODE_FOR_truncsipsi2
, to
, from
, UNKNOWN
);
962 #endif /* HAVE_truncsipsi2 */
966 if (from_mode
== PSImode
)
968 if (to_mode
!= SImode
)
970 from
= convert_to_mode (SImode
, from
, unsignedp
);
975 #ifdef HAVE_extendpsisi2
976 if (HAVE_extendpsisi2
)
978 emit_unop_insn (CODE_FOR_extendpsisi2
, to
, from
, UNKNOWN
);
981 #endif /* HAVE_extendpsisi2 */
986 if (to_mode
== PDImode
)
988 if (from_mode
!= DImode
)
989 from
= convert_to_mode (DImode
, from
, unsignedp
);
991 #ifdef HAVE_truncdipdi2
992 if (HAVE_truncdipdi2
)
994 emit_unop_insn (CODE_FOR_truncdipdi2
, to
, from
, UNKNOWN
);
997 #endif /* HAVE_truncdipdi2 */
1001 if (from_mode
== PDImode
)
1003 if (to_mode
!= DImode
)
1005 from
= convert_to_mode (DImode
, from
, unsignedp
);
1010 #ifdef HAVE_extendpdidi2
1011 if (HAVE_extendpdidi2
)
1013 emit_unop_insn (CODE_FOR_extendpdidi2
, to
, from
, UNKNOWN
);
1016 #endif /* HAVE_extendpdidi2 */
1021 /* Now follow all the conversions between integers
1022 no more than a word long. */
1024 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1025 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
1026 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
1027 GET_MODE_BITSIZE (from_mode
)))
1029 if (!((GET_CODE (from
) == MEM
1030 && ! MEM_VOLATILE_P (from
)
1031 && direct_load
[(int) to_mode
]
1032 && ! mode_dependent_address_p (XEXP (from
, 0)))
1033 || GET_CODE (from
) == REG
1034 || GET_CODE (from
) == SUBREG
))
1035 from
= force_reg (from_mode
, from
);
1036 if (GET_CODE (from
) == REG
&& REGNO (from
) < FIRST_PSEUDO_REGISTER
1037 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
1038 from
= copy_to_reg (from
);
1039 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
1043 /* Handle extension. */
1044 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
1046 /* Convert directly if that works. */
1047 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
1048 != CODE_FOR_nothing
)
1050 emit_unop_insn (code
, to
, from
, equiv_code
);
1055 enum machine_mode intermediate
;
1057 /* Search for a mode to convert via. */
1058 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
1059 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
1060 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
1061 != CODE_FOR_nothing
)
1062 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
1063 && TRULY_NOOP_TRUNCATION (to_mode
, intermediate
)))
1064 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
1065 != CODE_FOR_nothing
))
1067 convert_move (to
, convert_to_mode (intermediate
, from
,
1068 unsignedp
), unsignedp
);
1072 /* No suitable intermediate mode. */
1077 /* Support special truncate insns for certain modes. */
1079 if (from_mode
== DImode
&& to_mode
== SImode
)
1081 #ifdef HAVE_truncdisi2
1082 if (HAVE_truncdisi2
)
1084 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1088 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1092 if (from_mode
== DImode
&& to_mode
== HImode
)
1094 #ifdef HAVE_truncdihi2
1095 if (HAVE_truncdihi2
)
1097 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1101 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1105 if (from_mode
== DImode
&& to_mode
== QImode
)
1107 #ifdef HAVE_truncdiqi2
1108 if (HAVE_truncdiqi2
)
1110 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1114 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1118 if (from_mode
== SImode
&& to_mode
== HImode
)
1120 #ifdef HAVE_truncsihi2
1121 if (HAVE_truncsihi2
)
1123 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1127 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1131 if (from_mode
== SImode
&& to_mode
== QImode
)
1133 #ifdef HAVE_truncsiqi2
1134 if (HAVE_truncsiqi2
)
1136 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1140 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1144 if (from_mode
== HImode
&& to_mode
== QImode
)
1146 #ifdef HAVE_trunchiqi2
1147 if (HAVE_trunchiqi2
)
1149 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1153 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1157 if (from_mode
== TImode
&& to_mode
== DImode
)
1159 #ifdef HAVE_trunctidi2
1160 if (HAVE_trunctidi2
)
1162 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1166 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1170 if (from_mode
== TImode
&& to_mode
== SImode
)
1172 #ifdef HAVE_trunctisi2
1173 if (HAVE_trunctisi2
)
1175 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1179 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1183 if (from_mode
== TImode
&& to_mode
== HImode
)
1185 #ifdef HAVE_trunctihi2
1186 if (HAVE_trunctihi2
)
1188 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1192 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1196 if (from_mode
== TImode
&& to_mode
== QImode
)
1198 #ifdef HAVE_trunctiqi2
1199 if (HAVE_trunctiqi2
)
1201 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1205 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1209 /* Handle truncation of volatile memrefs, and so on;
1210 the things that couldn't be truncated directly,
1211 and for which there was no special instruction. */
1212 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1214 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1215 emit_move_insn (to
, temp
);
1219 /* Mode combination is not recognized. */
1223 /* Return an rtx for a value that would result
1224 from converting X to mode MODE.
1225 Both X and MODE may be floating, or both integer.
1226 UNSIGNEDP is nonzero if X is an unsigned value.
1227 This can be done by referring to a part of X in place
1228 or by copying to a new temporary with conversion.
1230 This function *must not* call protect_from_queue
1231 except when putting X into an insn (in which case convert_move does it). */
1234 convert_to_mode (mode
, x
, unsignedp
)
1235 enum machine_mode mode
;
1239 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
1242 /* Return an rtx for a value that would result
1243 from converting X from mode OLDMODE to mode MODE.
1244 Both modes may be floating, or both integer.
1245 UNSIGNEDP is nonzero if X is an unsigned value.
1247 This can be done by referring to a part of X in place
1248 or by copying to a new temporary with conversion.
1250 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1252 This function *must not* call protect_from_queue
1253 except when putting X into an insn (in which case convert_move does it). */
/* Strategy, as visible below: (1) strip an already-sufficient promoted
   SUBREG; (2) trust GET_MODE (x) over OLDMODE when X has a mode;
   (3) constant-fold CONST_INT cases; (4) use gen_lowpart when a
   re-interpretation suffices; (5) otherwise emit a real convert_move
   into a fresh register.  NOTE(review): several physical lines
   (declarations, braces, some guards and the final return) are elided
   by this extraction -- compare against the full file before editing.  */
1256 convert_modes (mode
, oldmode
, x
, unsignedp
)
1257 enum machine_mode mode
, oldmode
;
1263 /* If FROM is a SUBREG that indicates that we have already done at least
1264 the required extension, strip it. */
1266 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
1267 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
1268 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
1269 x
= gen_lowpart (mode
, x
);
1271 if (GET_MODE (x
) != VOIDmode
)
1272 oldmode
= GET_MODE (x
);
1274 if (mode
== oldmode
)
1277 /* There is one case that we must handle specially: If we are converting
1278 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1279 we are to interpret the constant as unsigned, gen_lowpart will do
1280 the wrong if the constant appears negative. What we want to do is
1281 make the high-order word of the constant zero, not all ones. */
1283 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
1284 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
1285 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
1287 HOST_WIDE_INT val
= INTVAL (x
);
1289 if (oldmode
!= VOIDmode
1290 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
1292 int width
= GET_MODE_BITSIZE (oldmode
);
1294 /* We need to zero extend VAL. */
1295 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
1298 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
1301 /* We can do this with a gen_lowpart if both desired and current modes
1302 are integer, and this is either a constant integer, a register, or a
1303 non-volatile MEM. Except for the constant case where MODE is no
1304 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1306 if ((GET_CODE (x
) == CONST_INT
1307 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1308 || (GET_MODE_CLASS (mode
) == MODE_INT
1309 && GET_MODE_CLASS (oldmode
) == MODE_INT
1310 && (GET_CODE (x
) == CONST_DOUBLE
1311 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
1312 && ((GET_CODE (x
) == MEM
&& ! MEM_VOLATILE_P (x
)
1313 && direct_load
[(int) mode
])
1314 || (GET_CODE (x
) == REG
1315 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
1316 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
1318 /* ?? If we don't know OLDMODE, we have to assume here that
1319 X does not need sign- or zero-extension. This may not be
1320 the case, but it's the best we can do. */
1321 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
1322 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
1324 HOST_WIDE_INT val
= INTVAL (x
);
1325 int width
= GET_MODE_BITSIZE (oldmode
);
1327 /* We must sign or zero-extend in this case. Start by
1328 zero-extending, then sign extend if we need to. */
1329 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
/* NOTE(review): the guard for this sign-extension step (presumably
   "if (! unsignedp" on elided line 1330) is missing from this
   extraction; the dangling "&&" below is its continuation.  */
1331 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
1332 val
|= (HOST_WIDE_INT
) (-1) << width
;
1334 return GEN_INT (val
);
1337 return gen_lowpart (mode
, x
);
/* Fallback: materialize the conversion with an explicit move.
   NOTE(review): the final "return temp;" is elided here.  */
1340 temp
= gen_reg_rtx (mode
);
1341 convert_move (temp
, x
, unsignedp
);
1345 /* Generate several move instructions to copy LEN bytes
1346 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1347 The caller must pass FROM and TO
1348 through protect_from_queue before calling.
1349 ALIGN (in bytes) is maximum alignment we can assume. */
/* Driver for piecewise block copy: fills a struct move_by_pieces with
   the addresses, auto-inc/dec capabilities and direction, then walks
   integer modes from widest to narrowest, delegating each mode's moves
   to move_by_pieces_1.  NOTE(review): parameter declarations, braces,
   and the left-hand sides of several "data.field =" assignments
   (e.g. data.autinc_to / data.autinc_from / data.reverse before the
   lone "=" lines below) are elided by this extraction.  */
1352 move_by_pieces (to
, from
, len
, align
)
1356 struct move_by_pieces data
;
1357 rtx to_addr
= XEXP (to
, 0), from_addr
= XEXP (from
, 0);
1358 int max_size
= MOVE_MAX
+ 1;
1361 data
.to_addr
= to_addr
;
1362 data
.from_addr
= from_addr
;
1366 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1367 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1369 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
1370 || GET_CODE (from_addr
) == POST_INC
1371 || GET_CODE (from_addr
) == POST_DEC
);
1373 data
.explicit_inc_from
= 0;
1374 data
.explicit_inc_to
= 0;
1376 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1377 if (data
.reverse
) data
.offset
= len
;
1380 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1381 data
.from_struct
= MEM_IN_STRUCT_P (from
);
1383 /* If copying requires more than two move insns,
1384 copy addresses to registers (to make displacements shorter)
1385 and use post-increment if available. */
1386 if (!(data
.autinc_from
&& data
.autinc_to
)
1387 && move_by_pieces_ninsns (len
, align
) > 2)
1389 #ifdef HAVE_PRE_DECREMENT
1390 if (data
.reverse
&& ! data
.autinc_from
)
1392 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
1393 data
.autinc_from
= 1;
1394 data
.explicit_inc_from
= -1;
1397 #ifdef HAVE_POST_INCREMENT
1398 if (! data
.autinc_from
)
1400 data
.from_addr
= copy_addr_to_reg (from_addr
);
1401 data
.autinc_from
= 1;
1402 data
.explicit_inc_from
= 1;
1405 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
1406 data
.from_addr
= copy_addr_to_reg (from_addr
);
1407 #ifdef HAVE_PRE_DECREMENT
1408 if (data
.reverse
&& ! data
.autinc_to
)
1410 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1412 data
.explicit_inc_to
= -1;
1415 #ifdef HAVE_POST_INCREMENT
1416 if (! data
.reverse
&& ! data
.autinc_to
)
1418 data
.to_addr
= copy_addr_to_reg (to_addr
);
1420 data
.explicit_inc_to
= 1;
1423 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1424 data
.to_addr
= copy_addr_to_reg (to_addr
);
/* On targets where unaligned access is cheap (or alignment is already
   maximal), pretend alignment is unlimited so the widest mode wins.
   NOTE(review): the assignment target of this condition (presumably
   "align = MOVE_MAX;" logic on elided lines) is not visible here.  */
1427 if (! SLOW_UNALIGNED_ACCESS
1428 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1431 /* First move what we can in the largest integer mode, then go to
1432 successively smaller modes. */
1434 while (max_size
> 1)
1436 enum machine_mode mode
= VOIDmode
, tmode
;
1437 enum insn_code icode
;
1439 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1440 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1441 if (GET_MODE_SIZE (tmode
) < max_size
)
1444 if (mode
== VOIDmode
)
1447 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1448 if (icode
!= CODE_FOR_nothing
1449 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1450 GET_MODE_SIZE (mode
)))
1451 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
1453 max_size
= GET_MODE_SIZE (mode
);
1456 /* The code above should have handled everything. */
1461 /* Return number of insns required to move L bytes by pieces.
1462 ALIGN (in bytes) is maximum alignment we can assume. */
/* Cost estimator mirroring move_by_pieces' mode-selection loop: for
   each usable integer mode (widest first) it charges l / size insns
   and keeps the remainder.  Used by callers to compare against
   MOVE_RATIO.  NOTE(review): parameter declarations, braces and the
   final "return n_insns;" are elided by this extraction.  */
1465 move_by_pieces_ninsns (l
, align
)
1469 register int n_insns
= 0;
1470 int max_size
= MOVE_MAX
+ 1;
1472 if (! SLOW_UNALIGNED_ACCESS
1473 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1476 while (max_size
> 1)
1478 enum machine_mode mode
= VOIDmode
, tmode
;
1479 enum insn_code icode
;
1481 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1482 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1483 if (GET_MODE_SIZE (tmode
) < max_size
)
1486 if (mode
== VOIDmode
)
1489 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
1490 if (icode
!= CODE_FOR_nothing
1491 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
1492 GET_MODE_SIZE (mode
)))
1493 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1495 max_size
= GET_MODE_SIZE (mode
);
1501 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1502 with move instructions for mode MODE. GENFUN is the gen_... function
1503 to make a move insn for that mode. DATA has all the other info. */
/* Inner loop: while at least GET_MODE_SIZE (mode) bytes remain, build
   source/destination MEMs (auto-inc address or explicit offset),
   emit any explicit pre-decrement adjustments, the move itself, then
   any explicit post-increment adjustments, and advance the offset.
   NOTE(review): the offset argument of the plus_constant calls, the
   "from1 =" assignment target, braces, and the loop's "data->len -="
   bookkeeping are elided by this extraction.  */
1506 move_by_pieces_1 (genfun
, mode
, data
)
1508 enum machine_mode mode
;
1509 struct move_by_pieces
*data
;
1511 register int size
= GET_MODE_SIZE (mode
);
1512 register rtx to1
, from1
;
1514 while (data
->len
>= size
)
1516 if (data
->reverse
) data
->offset
-= size
;
1518 to1
= (data
->autinc_to
1519 ? gen_rtx (MEM
, mode
, data
->to_addr
)
1520 : copy_rtx (change_address (data
->to
, mode
,
1521 plus_constant (data
->to_addr
,
1523 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
1526 = (data
->autinc_from
1527 ? gen_rtx (MEM
, mode
, data
->from_addr
)
1528 : copy_rtx (change_address (data
->from
, mode
,
1529 plus_constant (data
->from_addr
,
1531 MEM_IN_STRUCT_P (from1
) = data
->from_struct
;
1533 #ifdef HAVE_PRE_DECREMENT
1534 if (data
->explicit_inc_to
< 0)
1535 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
1536 if (data
->explicit_inc_from
< 0)
1537 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (-size
)));
1540 emit_insn ((*genfun
) (to1
, from1
));
1541 #ifdef HAVE_POST_INCREMENT
1542 if (data
->explicit_inc_to
> 0)
1543 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1544 if (data
->explicit_inc_from
> 0)
1545 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1548 if (! data
->reverse
) data
->offset
+= size
;
1554 /* Emit code to move a block Y to a block X.
1555 This may be done with string-move instructions,
1556 with multiple scalar move instructions, or with a library call.
1558 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1560 SIZE is an rtx that says how long they are.
1561 ALIGN is the maximum alignment we can assume they have,
1562 measured in bytes. */
/* Strategy order visible below: (1) small constant SIZE and cheap
   enough per move_by_pieces_ninsns -> move_by_pieces; (2) a target
   movstrM pattern whose operand predicates accept X, Y and ALIGN;
   (3) a library call -- memcpy when TARGET_MEM_FUNCTIONS, else bcopy
   (note bcopy's (src, dst) argument order: Y's address first).
   NOTE(review): sanity-check abort()s, braces, the movstr pattern
   predicate's mode argument, and the emit/return of the generated
   pattern are elided by this extraction.  */
1565 emit_block_move (x
, y
, size
, align
)
1570 if (GET_MODE (x
) != BLKmode
)
1573 if (GET_MODE (y
) != BLKmode
)
1576 x
= protect_from_queue (x
, 1);
1577 y
= protect_from_queue (y
, 0);
1578 size
= protect_from_queue (size
, 0);
1580 if (GET_CODE (x
) != MEM
)
1582 if (GET_CODE (y
) != MEM
)
1587 if (GET_CODE (size
) == CONST_INT
1588 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
1589 move_by_pieces (x
, y
, INTVAL (size
), align
);
1592 /* Try the most limited insn first, because there's no point
1593 including more than one in the machine description unless
1594 the more limited one has some advantage. */
1596 rtx opalign
= GEN_INT (align
);
1597 enum machine_mode mode
;
1599 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1600 mode
= GET_MODE_WIDER_MODE (mode
))
1602 enum insn_code code
= movstr_optab
[(int) mode
];
1604 if (code
!= CODE_FOR_nothing
1605 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1606 here because if SIZE is less than the mode mask, as it is
1607 returned by the macro, it will definitely be less than the
1608 actual mode mask. */
1609 && ((GET_CODE (size
) == CONST_INT
1610 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1611 <= GET_MODE_MASK (mode
)))
1612 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1613 && (insn_operand_predicate
[(int) code
][0] == 0
1614 || (*insn_operand_predicate
[(int) code
][0]) (x
, BLKmode
))
1615 && (insn_operand_predicate
[(int) code
][1] == 0
1616 || (*insn_operand_predicate
[(int) code
][1]) (y
, BLKmode
))
1617 && (insn_operand_predicate
[(int) code
][3] == 0
1618 || (*insn_operand_predicate
[(int) code
][3]) (opalign
,
1622 rtx last
= get_last_insn ();
1625 op2
= convert_to_mode (mode
, size
, 1);
1626 if (insn_operand_predicate
[(int) code
][2] != 0
1627 && ! (*insn_operand_predicate
[(int) code
][2]) (op2
, mode
))
1628 op2
= copy_to_mode_reg (mode
, op2
);
1630 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
/* Pattern expansion failed: roll back any insns it emitted and try
   the next wider mode.  */
1637 delete_insns_since (last
);
1641 #ifdef TARGET_MEM_FUNCTIONS
1642 emit_library_call (memcpy_libfunc
, 0,
1643 VOIDmode
, 3, XEXP (x
, 0), Pmode
,
1645 convert_to_mode (TYPE_MODE (sizetype
), size
,
1646 TREE_UNSIGNED (sizetype
)),
1647 TYPE_MODE (sizetype
));
1649 emit_library_call (bcopy_libfunc
, 0,
1650 VOIDmode
, 3, XEXP (y
, 0), Pmode
,
1652 convert_to_mode (TYPE_MODE (integer_type_node
), size
,
1653 TREE_UNSIGNED (integer_type_node
)),
1654 TYPE_MODE (integer_type_node
));
1659 /* Copy all or part of a value X into registers starting at REGNO.
1660 The number of registers to be filled is NREGS. */
/* Prefers a single load_multiple insn when the target provides one;
   otherwise falls back to NREGS word-sized moves using
   operand_subword_force.  Non-legitimate constants are first forced
   into the constant pool.  NOTE(review): parameter declarations,
   braces, the load_multiple insn's count operand, and the
   emit/rollback logic around PAT are elided by this extraction.  */
1663 move_block_to_reg (regno
, x
, nregs
, mode
)
1667 enum machine_mode mode
;
1675 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1676 x
= validize_mem (force_const_mem (mode
, x
));
1678 /* See if the machine can do this with a load multiple insn. */
1679 #ifdef HAVE_load_multiple
1680 if (HAVE_load_multiple
)
1682 last
= get_last_insn ();
1683 pat
= gen_load_multiple (gen_rtx (REG
, word_mode
, regno
), x
,
1691 delete_insns_since (last
);
1695 for (i
= 0; i
< nregs
; i
++)
1696 emit_move_insn (gen_rtx (REG
, word_mode
, regno
+ i
),
1697 operand_subword_force (x
, i
, mode
));
1700 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1701 The number of registers to be filled is NREGS. SIZE indicates the number
1702 of bytes in the object X. */
/* Mirror of move_block_to_reg for the store direction.  Fast paths:
   a single mode-sized store when SIZE fits a machine mode no wider
   than a word; a left-shift-then-store for sub-word sizes on
   big-endian targets; a store_multiple insn when available.
   Fallback: word-by-word stores via operand_subword.
   NOTE(review): declarations, braces, early returns, and the
   emit/rollback logic around PAT are elided by this extraction.  */
1706 move_block_from_reg (regno
, x
, nregs
, size
)
1714 enum machine_mode mode
;
1716 /* If SIZE is that of a mode no bigger than a word, just use that
1717 mode's store operation. */
1718 if (size
<= UNITS_PER_WORD
1719 && (mode
= mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
, 0)) != BLKmode
)
1721 emit_move_insn (change_address (x
, mode
, NULL
),
1722 gen_rtx (REG
, mode
, regno
));
1726 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1727 to the left before storing to memory. Note that the previous test
1728 doesn't handle all cases (e.g. SIZE == 3). */
1729 if (size
< UNITS_PER_WORD
&& BYTES_BIG_ENDIAN
)
1731 rtx tem
= operand_subword (x
, 0, 1, BLKmode
);
1737 shift
= expand_shift (LSHIFT_EXPR
, word_mode
,
1738 gen_rtx (REG
, word_mode
, regno
),
1739 build_int_2 ((UNITS_PER_WORD
- size
)
1740 * BITS_PER_UNIT
, 0), NULL_RTX
, 0);
1741 emit_move_insn (tem
, shift
);
1745 /* See if the machine can do this with a store multiple insn. */
1746 #ifdef HAVE_store_multiple
1747 if (HAVE_store_multiple
)
1749 last
= get_last_insn ();
1750 pat
= gen_store_multiple (x
, gen_rtx (REG
, word_mode
, regno
),
1758 delete_insns_since (last
);
1762 for (i
= 0; i
< nregs
; i
++)
1764 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1769 emit_move_insn (tem
, gen_rtx (REG
, word_mode
, regno
+ i
));
1773 /* Emit code to move a block Y to a block X, where X is non-consecutive
1774 registers represented by a PARALLEL. */
/* Each PARALLEL element pairs a destination register with a constant
   byte offset; MEM sources are addressed at that offset, while
   register/constant sources at offset 0 are adapted by mode
   (same-mode direct move, low word via operand_subword, or
   gen_lowpart for equal-size modes).
   NOTE(review): declarations, the abort() arms, the initialization
   of the loop index I, and braces are elided by this extraction.  */
1777 emit_group_load (x
, y
)
1780 rtx target_reg
, source
;
1783 if (GET_CODE (x
) != PARALLEL
)
1786 /* Check for a NULL entry, used to indicate that the parameter goes
1787 both on the stack and in registers. */
1788 if (XEXP (XVECEXP (x
, 0, 0), 0))
1793 for (; i
< XVECLEN (x
, 0); i
++)
1795 rtx element
= XVECEXP (x
, 0, i
);
1797 target_reg
= XEXP (element
, 0);
1799 if (GET_CODE (y
) == MEM
)
1800 source
= change_address (y
, GET_MODE (target_reg
),
1801 plus_constant (XEXP (y
, 0),
1802 INTVAL (XEXP (element
, 1))));
1803 else if (XEXP (element
, 1) == const0_rtx
)
1805 if (GET_MODE (target_reg
) == GET_MODE (y
))
1807 /* Allow for the target_reg to be smaller than the input register
1808 to allow for AIX with 4 DF arguments after a single SI arg. The
1809 last DF argument will only load 1 word into the integer registers,
1810 but load a DF value into the float registers. */
1811 else if ((GET_MODE_SIZE (GET_MODE (target_reg
))
1812 <= GET_MODE_SIZE (GET_MODE (y
)))
1813 && GET_MODE (target_reg
) == word_mode
)
1814 /* This might be a const_double, so we can't just use SUBREG. */
1815 source
= operand_subword (y
, 0, 0, VOIDmode
);
1816 else if (GET_MODE_SIZE (GET_MODE (target_reg
))
1817 == GET_MODE_SIZE (GET_MODE (y
)))
1818 source
= gen_lowpart (GET_MODE (target_reg
), y
);
1825 emit_move_insn (target_reg
, source
);
1829 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1830 registers represented by a PARALLEL. */
/* Inverse of emit_group_load: scatter the registers of PARALLEL Y
   into destination X.  MEM destinations are addressed at each
   element's byte offset; otherwise a mode-adapting gen_lowpart is
   applied before the move.  NOTE(review): declarations, abort()
   arms, initialization of I, and braces are elided here.  */
1833 emit_group_store (x
, y
)
1836 rtx source_reg
, target
;
1839 if (GET_CODE (y
) != PARALLEL
)
1842 /* Check for a NULL entry, used to indicate that the parameter goes
1843 both on the stack and in registers. */
1844 if (XEXP (XVECEXP (y
, 0, 0), 0))
1849 for (; i
< XVECLEN (y
, 0); i
++)
1851 rtx element
= XVECEXP (y
, 0, i
);
1853 source_reg
= XEXP (element
, 0);
1855 if (GET_CODE (x
) == MEM
)
1856 target
= change_address (x
, GET_MODE (source_reg
),
1857 plus_constant (XEXP (x
, 0),
1858 INTVAL (XEXP (element
, 1))));
1859 else if (XEXP (element
, 1) == const0_rtx
)
1862 if (GET_MODE (target
) != GET_MODE (source_reg
))
1863 target
= gen_lowpart (GET_MODE (source_reg
), target
);
1868 emit_move_insn (target
, source_reg
);
1872 /* Add a USE expression for REG to the (possibly empty) list pointed
1873 to by CALL_FUSAGE. REG must denote a hard register. */
/* Prepends (USE reg) wrapped in an EXPR_LIST onto *CALL_FUSAGE.
   NOTE(review): the hard-register sanity check's abort() and the
   "*call_fusage" left-hand side of the assignment (before the lone
   "=" line) are elided by this extraction.  */
1876 use_reg (call_fusage
, reg
)
1877 rtx
*call_fusage
, reg
;
1879 if (GET_CODE (reg
) != REG
1880 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
1884 = gen_rtx (EXPR_LIST
, VOIDmode
,
1885 gen_rtx (USE
, VOIDmode
, reg
), *call_fusage
);
1888 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1889 starting at REGNO. All of these registers must be hard registers. */
/* Loops over the register range, building each REG rtx in its raw
   (natural) mode via reg_raw_mode and delegating to use_reg.
   NOTE(review): parameter declarations, the abort() for the
   hard-register range check, and braces are elided here.  */
1892 use_regs (call_fusage
, regno
, nregs
)
1899 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
1902 for (i
= 0; i
< nregs
; i
++)
1903 use_reg (call_fusage
, gen_rtx (REG
, reg_raw_mode
[regno
+ i
], regno
+ i
));
1906 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1907 PARALLEL REGS. This is for calls that pass values in multiple
1908 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* Walks the PARALLEL's elements and registers a USE for each one's
   register (element operand 0) via use_reg.  NOTE(review):
   declarations, braces, and the initialization of I are elided by
   this extraction.  */
1911 use_group_regs (call_fusage
, regs
)
1917 /* Check for a NULL entry, used to indicate that the parameter goes
1918 both on the stack and in registers. */
1919 if (XEXP (XVECEXP (regs
, 0, 0), 0))
1924 for (; i
< XVECLEN (regs
, 0); i
++)
1925 use_reg (call_fusage
, XEXP (XVECEXP (regs
, 0, i
), 0));
/* Zeroing analogue of move_by_pieces: only a destination side exists,
   so the struct clear_by_pieces setup mirrors the "to" half of the
   copy driver, and the widest-to-narrowest mode loop delegates to
   clear_by_pieces_1.  NOTE(review): the original doc comment below is
   left unterminated by this extraction (its closing line is elided),
   as are parameter declarations, braces, several "data.field ="
   left-hand sides, and parts of the two-insn-threshold condition.  */
1928 /* Generate several move instructions to clear LEN bytes of block TO.
1929 (A MEM rtx with BLKmode). The caller must pass TO through
1930 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1934 clear_by_pieces (to
, len
, align
)
1938 struct clear_by_pieces data
;
1939 rtx to_addr
= XEXP (to
, 0);
1940 int max_size
= MOVE_MAX
+ 1;
1943 data
.to_addr
= to_addr
;
1946 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
1947 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
1949 data
.explicit_inc_to
= 0;
1951 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
1952 if (data
.reverse
) data
.offset
= len
;
1955 data
.to_struct
= MEM_IN_STRUCT_P (to
);
1957 /* If copying requires more than two move insns,
1958 copy addresses to registers (to make displacements shorter)
1959 and use post-increment if available. */
1961 && move_by_pieces_ninsns (len
, align
) > 2)
1963 #ifdef HAVE_PRE_DECREMENT
1964 if (data
.reverse
&& ! data
.autinc_to
)
1966 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
1968 data
.explicit_inc_to
= -1;
1971 #ifdef HAVE_POST_INCREMENT
1972 if (! data
.reverse
&& ! data
.autinc_to
)
1974 data
.to_addr
= copy_addr_to_reg (to_addr
);
1976 data
.explicit_inc_to
= 1;
1979 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
1980 data
.to_addr
= copy_addr_to_reg (to_addr
);
1983 if (! SLOW_UNALIGNED_ACCESS
1984 || align
> MOVE_MAX
|| align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
)
1987 /* First move what we can in the largest integer mode, then go to
1988 successively smaller modes. */
1990 while (max_size
> 1)
1992 enum machine_mode mode
= VOIDmode
, tmode
;
1993 enum insn_code icode
;
1995 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1996 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1997 if (GET_MODE_SIZE (tmode
) < max_size
)
2000 if (mode
== VOIDmode
)
2003 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2004 if (icode
!= CODE_FOR_nothing
2005 && align
>= MIN (BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
,
2006 GET_MODE_SIZE (mode
)))
2007 clear_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
2009 max_size
= GET_MODE_SIZE (mode
);
2012 /* The code above should have handled everything. */
2017 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2018 with move instructions for mode MODE. GENFUN is the gen_... function
2019 to make a move insn for that mode. DATA has all the other info. */
/* Same loop shape as move_by_pieces_1, but the source of each move is
   const0_rtx and only the destination address needs auto-inc/explicit
   adjustment handling.  NOTE(review): the to1 declaration, the
   plus_constant offset argument, braces, and the loop's
   "data->len -= size" bookkeeping are elided by this extraction.  */
2022 clear_by_pieces_1 (genfun
, mode
, data
)
2024 enum machine_mode mode
;
2025 struct clear_by_pieces
*data
;
2027 register int size
= GET_MODE_SIZE (mode
);
2030 while (data
->len
>= size
)
2032 if (data
->reverse
) data
->offset
-= size
;
2034 to1
= (data
->autinc_to
2035 ? gen_rtx (MEM
, mode
, data
->to_addr
)
2036 : copy_rtx (change_address (data
->to
, mode
,
2037 plus_constant (data
->to_addr
,
2039 MEM_IN_STRUCT_P (to1
) = data
->to_struct
;
2041 #ifdef HAVE_PRE_DECREMENT
2042 if (data
->explicit_inc_to
< 0)
2043 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (-size
)));
2046 emit_insn ((*genfun
) (to1
, const0_rtx
));
2047 #ifdef HAVE_POST_INCREMENT
2048 if (data
->explicit_inc_to
> 0)
2049 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2052 if (! data
->reverse
) data
->offset
+= size
;
2058 /* Write zeros through the storage of OBJECT.
2059 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2060 the maximum alignment we can assume it has, measured in bytes. */
/* Zeroing analogue of emit_block_move, same strategy ladder:
   (1) cheap constant SIZE -> clear_by_pieces; (2) a target clrstrM
   pattern whose predicates accept OBJECT and ALIGN; (3) a library
   call -- memset(ptr, 0, n) under TARGET_MEM_FUNCTIONS, else
   bzero(ptr, n); non-BLKmode objects are simply moved const0_rtx.
   NOTE(review): declarations, braces, predicate mode arguments, the
   emit/return of PAT, and the non-BLK else-branch header are elided
   by this extraction.  */
2063 clear_storage (object
, size
, align
)
2068 if (GET_MODE (object
) == BLKmode
)
2070 object
= protect_from_queue (object
, 1);
2071 size
= protect_from_queue (size
, 0);
2073 if (GET_CODE (size
) == CONST_INT
2074 && (move_by_pieces_ninsns (INTVAL (size
), align
) < MOVE_RATIO
))
2075 clear_by_pieces (object
, INTVAL (size
), align
);
2079 /* Try the most limited insn first, because there's no point
2080 including more than one in the machine description unless
2081 the more limited one has some advantage. */
2083 rtx opalign
= GEN_INT (align
);
2084 enum machine_mode mode
;
2086 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2087 mode
= GET_MODE_WIDER_MODE (mode
))
2089 enum insn_code code
= clrstr_optab
[(int) mode
];
2091 if (code
!= CODE_FOR_nothing
2092 /* We don't need MODE to be narrower than
2093 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2094 the mode mask, as it is returned by the macro, it will
2095 definitely be less than the actual mode mask. */
2096 && ((GET_CODE (size
) == CONST_INT
2097 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2098 <= GET_MODE_MASK (mode
)))
2099 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2100 && (insn_operand_predicate
[(int) code
][0] == 0
2101 || (*insn_operand_predicate
[(int) code
][0]) (object
,
2103 && (insn_operand_predicate
[(int) code
][2] == 0
2104 || (*insn_operand_predicate
[(int) code
][2]) (opalign
,
2108 rtx last
= get_last_insn ();
2111 op1
= convert_to_mode (mode
, size
, 1);
2112 if (insn_operand_predicate
[(int) code
][1] != 0
2113 && ! (*insn_operand_predicate
[(int) code
][1]) (op1
,
2115 op1
= copy_to_mode_reg (mode
, op1
);
2117 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2124 delete_insns_since (last
);
2129 #ifdef TARGET_MEM_FUNCTIONS
2130 emit_library_call (memset_libfunc
, 0,
2132 XEXP (object
, 0), Pmode
,
2133 const0_rtx
, TYPE_MODE (integer_type_node
),
2134 convert_to_mode (TYPE_MODE (sizetype
),
2135 size
, TREE_UNSIGNED (sizetype
)),
2136 TYPE_MODE (sizetype
));
2138 emit_library_call (bzero_libfunc
, 0,
2140 XEXP (object
, 0), Pmode
,
2141 convert_to_mode (TYPE_MODE (integer_type_node
),
2143 TREE_UNSIGNED (integer_type_node
)),
2144 TYPE_MODE (integer_type_node
));
2149 emit_move_insn (object
, const0_rtx
);
2152 /* Generate code to copy Y into X.
2153 Both Y and X must have the same mode, except that
2154 Y can be a constant with VOIDmode.
2155 This mode cannot be BLKmode; use emit_block_move for that.
2157 Return the last instruction emitted. */
/* Validation wrapper over emit_move_insn_1: unqueues both operands,
   aborts on mode mismatch or BLKmode, forces non-legitimate constants
   to memory, and re-legitimizes invalid MEM addresses before
   delegating.  NOTE(review): several lines are elided by this
   extraction -- including abort() arms, parts of the address-validity
   conditions, and notably the closing of the comment at original line
   2174, which this extraction leaves unterminated.  */
2160 emit_move_insn (x
, y
)
2163 enum machine_mode mode
= GET_MODE (x
);
2165 x
= protect_from_queue (x
, 1);
2166 y
= protect_from_queue (y
, 0);
2168 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2171 if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2172 y
= force_const_mem (mode
, y
);
2174 /* If X or Y are memory references, verify that their addresses are valid
2176 if (GET_CODE (x
) == MEM
2177 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2178 && ! push_operand (x
, GET_MODE (x
)))
2180 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2181 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2183 if (GET_CODE (y
) == MEM
2184 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2186 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2187 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2189 if (mode
== BLKmode
)
2192 return emit_move_insn_1 (x
, y
);
2195 /* Low level part of emit_move_insn.
2196 Called just like emit_move_insn, but assumes X and Y
2197 are basically valid. */
/* Three visible strategies: (1) a direct movM pattern for MODE;
   (2) complex modes split into realpart/imagpart submode moves, with
   stack pushes ordered so the high part lands first in argument
   order (STACK_GROWS_DOWNWARD flips the real/imag push order);
   (3) multiword modes split into word-sized subword moves, with a
   CLOBBER emitted first so the whole output is seen to die here.
   NOTE(review): braces, the "stack" else-branches, the for-loop
   header around the subword walk, the abort() arms, and the final
   return of LAST_INSN are elided by this extraction.  */
2200 emit_move_insn_1 (x
, y
)
2203 enum machine_mode mode
= GET_MODE (x
);
2204 enum machine_mode submode
;
2205 enum mode_class
class = GET_MODE_CLASS (mode
);
2208 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2210 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2212 /* Expand complex moves by moving real part and imag part, if possible. */
2213 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2214 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2216 (class == MODE_COMPLEX_INT
2217 ? MODE_INT
: MODE_FLOAT
),
2219 && (mov_optab
->handlers
[(int) submode
].insn_code
2220 != CODE_FOR_nothing
))
2222 /* Don't split destination if it is a stack push. */
2223 int stack
= push_operand (x
, GET_MODE (x
));
2226 /* If this is a stack, push the highpart first, so it
2227 will be in the argument order.
2229 In that case, change_address is used only to convert
2230 the mode, not to change the address. */
2233 /* Note that the real part always precedes the imag part in memory
2234 regardless of machine's endianness. */
2235 #ifdef STACK_GROWS_DOWNWARD
2236 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2237 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2238 gen_imagpart (submode
, y
)));
2239 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2240 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2241 gen_realpart (submode
, y
)));
2243 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2244 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2245 gen_realpart (submode
, y
)));
2246 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2247 (gen_rtx (MEM
, submode
, (XEXP (x
, 0))),
2248 gen_imagpart (submode
, y
)));
/* Non-push case: move real and imaginary halves independently.  */
2253 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2254 (gen_realpart (submode
, x
), gen_realpart (submode
, y
)));
2255 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2256 (gen_imagpart (submode
, x
), gen_imagpart (submode
, y
)));
2259 return get_last_insn ();
2262 /* This will handle any multi-word mode that lacks a move_insn pattern.
2263 However, you will get better code if you define such patterns,
2264 even if they must turn into multiple assembler instructions. */
2265 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2270 #ifdef PUSH_ROUNDING
2272 /* If X is a push on the stack, do the push now and replace
2273 X with a reference to the stack pointer. */
2274 if (push_operand (x
, GET_MODE (x
)))
2276 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2277 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2281 /* Show the output dies here. */
2283 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, x
));
2286 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2289 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2290 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2292 /* If we can't get a part of Y, put Y into memory if it is a
2293 constant. Otherwise, force it into a register. If we still
2294 can't get a part of Y, abort. */
2295 if (ypart
== 0 && CONSTANT_P (y
))
2297 y
= force_const_mem (mode
, y
);
2298 ypart
= operand_subword (y
, i
, 1, mode
);
2300 else if (ypart
== 0)
2301 ypart
= operand_subword_force (y
, i
, mode
);
2303 if (xpart
== 0 || ypart
== 0)
2306 last_insn
= emit_move_insn (xpart
, ypart
);
2315 /* Pushing data onto the stack. */
2317 /* Push a block of length SIZE (perhaps variable)
2318 and return an rtx to address the beginning of the block.
2319 Note that it is not possible for the value returned to be a QUEUED.
2320 The value may be virtual_outgoing_args_rtx.
2322 EXTRA is the number of bytes of padding to push in addition to SIZE.
2323 BELOW nonzero means this padding comes at low addresses;
2324 otherwise, the padding comes at high addresses. */
/* Makes room with anti_adjust_stack (constant-folded when SIZE and
   EXTRA are constants, else computed into a temp register), then
   builds the block's address relative to virtual_outgoing_args_rtx;
   the STACK_GROWS_DOWNWARD arm offsets forward for low padding, the
   other arm subtracts SIZE (+EXTRA for high padding).
   NOTE(review): parameter declarations, braces, and the branch
   structure connecting the two #ifdef arms are elided by this
   extraction.  */
2327 push_block (size
, extra
, below
)
2333 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
2334 if (CONSTANT_P (size
))
2335 anti_adjust_stack (plus_constant (size
, extra
));
2336 else if (GET_CODE (size
) == REG
&& extra
== 0)
2337 anti_adjust_stack (size
);
2340 rtx temp
= copy_to_mode_reg (Pmode
, size
);
2342 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
2343 temp
, 0, OPTAB_LIB_WIDEN
);
2344 anti_adjust_stack (temp
);
2347 #ifdef STACK_GROWS_DOWNWARD
2348 temp
= virtual_outgoing_args_rtx
;
2349 if (extra
!= 0 && below
)
2350 temp
= plus_constant (temp
, extra
);
2352 if (GET_CODE (size
) == CONST_INT
)
2353 temp
= plus_constant (virtual_outgoing_args_rtx
,
2354 - INTVAL (size
) - (below
? 0 : extra
));
2355 else if (extra
!= 0 && !below
)
2356 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2357 negate_rtx (Pmode
, plus_constant (size
, extra
)));
2359 temp
= gen_rtx (PLUS
, Pmode
, virtual_outgoing_args_rtx
,
2360 negate_rtx (Pmode
, size
));
2363 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
/* NOTE(review): the function header is elided by this extraction;
   this lone return statement builds a (STACK_PUSH_CODE
   stack_pointer_rtx) address -- i.e. a pre-dec/pre-inc push operand.
   It appears to be the body of the one-line helper gen_push_operand;
   confirm against the full file.  */
2369 return gen_rtx (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
2372 /* Generate code to push X onto the stack, assuming it has mode MODE and
2374 MODE is redundant except when X is a CONST_INT (since they don't
2376 SIZE is an rtx for the size of data to be copied (in bytes),
2377 needed only if X is BLKmode.
2379 ALIGN (in bytes) is maximum alignment we can assume.
2381 If PARTIAL and REG are both nonzero, then copy that many of the first
2382 words of X into registers starting with REG, and push the rest of X.
2383 The amount of space pushed is decreased by PARTIAL words,
2384 rounded *down* to a multiple of PARM_BOUNDARY.
2385 REG must be a hard register in this case.
2386 If REG is zero but PARTIAL is not, take any all others actions for an
2387 argument partially in registers, but do not actually load any
2390 EXTRA is the amount in bytes of extra space to leave next to this arg.
2391 This is ignored if an argument block has already been allocated.
2393 On a machine that lacks real push insns, ARGS_ADDR is the address of
2394 the bottom of the argument block for this call. We use indexing off there
2395 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2396 argument block has not been preallocated.
2398 ARGS_SO_FAR is the size of args previously pushed for this call. */
2401 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
2402 args_addr
, args_so_far
)
2404 enum machine_mode mode
;
2415 enum direction stack_direction
2416 #ifdef STACK_GROWS_DOWNWARD
2422 /* Decide where to pad the argument: `downward' for below,
2423 `upward' for above, or `none' for don't pad it.
2424 Default is below for small data on big-endian machines; else above. */
2425 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
2427 /* Invert direction if stack is post-update. */
2428 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
2429 if (where_pad
!= none
)
2430 where_pad
= (where_pad
== downward
? upward
: downward
);
2432 xinner
= x
= protect_from_queue (x
, 0);
2434 if (mode
== BLKmode
)
2436 /* Copy a block into the stack, entirely or partially. */
2439 int used
= partial
* UNITS_PER_WORD
;
2440 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
2448 /* USED is now the # of bytes we need not copy to the stack
2449 because registers will take care of them. */
2452 xinner
= change_address (xinner
, BLKmode
,
2453 plus_constant (XEXP (xinner
, 0), used
));
2455 /* If the partial register-part of the arg counts in its stack size,
2456 skip the part of stack space corresponding to the registers.
2457 Otherwise, start copying to the beginning of the stack space,
2458 by setting SKIP to 0. */
2459 #ifndef REG_PARM_STACK_SPACE
2465 #ifdef PUSH_ROUNDING
2466 /* Do it with several push insns if that doesn't take lots of insns
2467 and if there is no difficulty with push insns that skip bytes
2468 on the stack for alignment purposes. */
2470 && GET_CODE (size
) == CONST_INT
2472 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
) - used
, align
)
2474 /* Here we avoid the case of a structure whose weak alignment
2475 forces many pushes of a small amount of data,
2476 and such small pushes do rounding that causes trouble. */
2477 && ((! SLOW_UNALIGNED_ACCESS
)
2478 || align
>= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
2479 || PUSH_ROUNDING (align
) == align
)
2480 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
2482 /* Push padding now if padding above and stack grows down,
2483 or if padding below and stack grows up.
2484 But if space already allocated, this has already been done. */
2485 if (extra
&& args_addr
== 0
2486 && where_pad
!= none
&& where_pad
!= stack_direction
)
2487 anti_adjust_stack (GEN_INT (extra
));
2489 move_by_pieces (gen_rtx (MEM
, BLKmode
, gen_push_operand ()), xinner
,
2490 INTVAL (size
) - used
, align
);
2493 #endif /* PUSH_ROUNDING */
2495 /* Otherwise make space on the stack and copy the data
2496 to the address of that space. */
2498 /* Deduct words put into registers from the size we must copy. */
2501 if (GET_CODE (size
) == CONST_INT
)
2502 size
= GEN_INT (INTVAL (size
) - used
);
2504 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
2505 GEN_INT (used
), NULL_RTX
, 0,
2509 /* Get the address of the stack space.
2510 In this case, we do not deal with EXTRA separately.
2511 A single stack adjust will do. */
2514 temp
= push_block (size
, extra
, where_pad
== downward
);
2517 else if (GET_CODE (args_so_far
) == CONST_INT
)
2518 temp
= memory_address (BLKmode
,
2519 plus_constant (args_addr
,
2520 skip
+ INTVAL (args_so_far
)));
2522 temp
= memory_address (BLKmode
,
2523 plus_constant (gen_rtx (PLUS
, Pmode
,
2524 args_addr
, args_so_far
),
2527 /* TEMP is the address of the block. Copy the data there. */
2528 if (GET_CODE (size
) == CONST_INT
2529 && (move_by_pieces_ninsns ((unsigned) INTVAL (size
), align
)
2532 move_by_pieces (gen_rtx (MEM
, BLKmode
, temp
), xinner
,
2533 INTVAL (size
), align
);
2536 /* Try the most limited insn first, because there's no point
2537 including more than one in the machine description unless
2538 the more limited one has some advantage. */
2539 #ifdef HAVE_movstrqi
2541 && GET_CODE (size
) == CONST_INT
2542 && ((unsigned) INTVAL (size
)
2543 < (1 << (GET_MODE_BITSIZE (QImode
) - 1))))
2545 rtx pat
= gen_movstrqi (gen_rtx (MEM
, BLKmode
, temp
),
2546 xinner
, size
, GEN_INT (align
));
2554 #ifdef HAVE_movstrhi
2556 && GET_CODE (size
) == CONST_INT
2557 && ((unsigned) INTVAL (size
)
2558 < (1 << (GET_MODE_BITSIZE (HImode
) - 1))))
2560 rtx pat
= gen_movstrhi (gen_rtx (MEM
, BLKmode
, temp
),
2561 xinner
, size
, GEN_INT (align
));
2569 #ifdef HAVE_movstrsi
2572 rtx pat
= gen_movstrsi (gen_rtx (MEM
, BLKmode
, temp
),
2573 xinner
, size
, GEN_INT (align
));
2581 #ifdef HAVE_movstrdi
2584 rtx pat
= gen_movstrdi (gen_rtx (MEM
, BLKmode
, temp
),
2585 xinner
, size
, GEN_INT (align
));
2594 #ifndef ACCUMULATE_OUTGOING_ARGS
2595 /* If the source is referenced relative to the stack pointer,
2596 copy it to another register to stabilize it. We do not need
2597 to do this if we know that we won't be changing sp. */
2599 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
2600 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
2601 temp
= copy_to_reg (temp
);
2604 /* Make inhibit_defer_pop nonzero around the library call
2605 to force it to pop the bcopy-arguments right away. */
2607 #ifdef TARGET_MEM_FUNCTIONS
2608 emit_library_call (memcpy_libfunc
, 0,
2609 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
2610 convert_to_mode (TYPE_MODE (sizetype
),
2611 size
, TREE_UNSIGNED (sizetype
)),
2612 TYPE_MODE (sizetype
));
2614 emit_library_call (bcopy_libfunc
, 0,
2615 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
2616 convert_to_mode (TYPE_MODE (integer_type_node
),
2618 TREE_UNSIGNED (integer_type_node
)),
2619 TYPE_MODE (integer_type_node
));
2624 else if (partial
> 0)
2626 /* Scalar partly in registers. */
2628 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
2631 /* # words of start of argument
2632 that we must make space for but need not store. */
2633 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
2634 int args_offset
= INTVAL (args_so_far
);
2637 /* Push padding now if padding above and stack grows down,
2638 or if padding below and stack grows up.
2639 But if space already allocated, this has already been done. */
2640 if (extra
&& args_addr
== 0
2641 && where_pad
!= none
&& where_pad
!= stack_direction
)
2642 anti_adjust_stack (GEN_INT (extra
));
2644 /* If we make space by pushing it, we might as well push
2645 the real data. Otherwise, we can leave OFFSET nonzero
2646 and leave the space uninitialized. */
2650 /* Now NOT_STACK gets the number of words that we don't need to
2651 allocate on the stack. */
2652 not_stack
= partial
- offset
;
2654 /* If the partial register-part of the arg counts in its stack size,
2655 skip the part of stack space corresponding to the registers.
2656 Otherwise, start copying to the beginning of the stack space,
2657 by setting SKIP to 0. */
2658 #ifndef REG_PARM_STACK_SPACE
2664 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
2665 x
= validize_mem (force_const_mem (mode
, x
));
2667 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2668 SUBREGs of such registers are not allowed. */
2669 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
2670 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
2671 x
= copy_to_reg (x
);
2673 /* Loop over all the words allocated on the stack for this arg. */
2674 /* We can do it by words, because any scalar bigger than a word
2675 has a size a multiple of a word. */
2676 #ifndef PUSH_ARGS_REVERSED
2677 for (i
= not_stack
; i
< size
; i
++)
2679 for (i
= size
- 1; i
>= not_stack
; i
--)
2681 if (i
>= not_stack
+ offset
)
2682 emit_push_insn (operand_subword_force (x
, i
, mode
),
2683 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
2685 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
2686 * UNITS_PER_WORD
)));
2692 /* Push padding now if padding above and stack grows down,
2693 or if padding below and stack grows up.
2694 But if space already allocated, this has already been done. */
2695 if (extra
&& args_addr
== 0
2696 && where_pad
!= none
&& where_pad
!= stack_direction
)
2697 anti_adjust_stack (GEN_INT (extra
));
2699 #ifdef PUSH_ROUNDING
2701 addr
= gen_push_operand ();
2704 if (GET_CODE (args_so_far
) == CONST_INT
)
2706 = memory_address (mode
,
2707 plus_constant (args_addr
, INTVAL (args_so_far
)));
2709 addr
= memory_address (mode
, gen_rtx (PLUS
, Pmode
, args_addr
,
2712 emit_move_insn (gen_rtx (MEM
, mode
, addr
), x
);
2716 /* If part should go in registers, copy that part
2717 into the appropriate registers. Do this now, at the end,
2718 since mem-to-mem copies above may do function calls. */
2719 if (partial
> 0 && reg
!= 0)
2721 /* Handle calls that pass values in multiple non-contiguous locations.
2722 The Irix 6 ABI has examples of this. */
2723 if (GET_CODE (reg
) == PARALLEL
)
2724 emit_group_load (reg
, x
);
2726 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
2729 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
2730 anti_adjust_stack (GEN_INT (extra
));
/* NOTE(review): this region is a line-mangled extraction of
   expand_assignment() from GCC expr.c.  Logical source lines are split
   across physical lines, and the leading integers (e.g. "2745") are
   original-file line numbers fused into the text.  Gaps in that numbering
   (e.g. 2742 -> 2745, 2830 -> 2832) show interior lines are MISSING, so the
   code below is not compilable as-is -- TODO restore from a pristine copy.
   Only comments are added here; every code token is unchanged.  */
2733 /* Expand an assignment that stores the value of FROM into TO.
2734 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2735 (This may contain a QUEUED rtx;
2736 if the value is constant, this rtx is a constant.)
2737 Otherwise, the returned value is NULL_RTX.
2739 SUGGEST_REG is no longer actually used.
2740 It used to mean, copy the value through a register
2741 and return that register, if that is possible.
2742 We now use WANT_VALUE to decide whether to do this. */
2745 expand_assignment (to
, from
, want_value
, suggest_reg
)
2750 register rtx to_rtx
= 0;
2753 /* Don't crash if the lhs of the assignment was erroneous. */
2755 if (TREE_CODE (to
) == ERROR_MARK
)
2757 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2758 return want_value
? result
: NULL_RTX
;
/* Bytecode backend path: expand FROM, duplicate it on the stack, then
   store through the address of TO (bc_expand_address / bc_store_memory).  */
2761 if (output_bytecode
)
2763 tree dest_innermost
;
2765 bc_expand_expr (from
);
2766 bc_emit_instruction (duplicate
);
2768 dest_innermost
= bc_expand_address (to
);
2770 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2771 take care of it here. */
2773 bc_store_memory (TREE_TYPE (to
), dest_innermost
);
2777 /* Assignment of a structure component needs special treatment
2778 if the structure component's rtx is not simply a MEM.
2779 Assignment of an array element at a constant index, and assignment of
2780 an array element in an unaligned packed structure field, has the same
2783 if (TREE_CODE (to
) == COMPONENT_REF
2784 || TREE_CODE (to
) == BIT_FIELD_REF
2785 || (TREE_CODE (to
) == ARRAY_REF
2786 && ((TREE_CODE (TREE_OPERAND (to
, 1)) == INTEGER_CST
2787 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to
))) == INTEGER_CST
)
2788 || (SLOW_UNALIGNED_ACCESS
&& get_inner_unaligned_p (to
)))))
2790 enum machine_mode mode1
;
/* Decompose the reference into object + bit position/size via
   get_inner_reference, then store with store_field below.  */
2800 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
2801 &unsignedp
, &volatilep
, &alignment
);
2803 /* If we are going to use store_bit_field and extract_bit_field,
2804 make sure to_rtx will be safe for multiple use. */
2806 if (mode1
== VOIDmode
&& want_value
)
2807 tem
= stabilize_reference (tem
);
2809 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, 0);
2812 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
2814 if (GET_CODE (to_rtx
) != MEM
)
2816 to_rtx
= change_address (to_rtx
, VOIDmode
,
2817 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
2818 force_reg (ptr_mode
, offset_rtx
)));
2822 if (GET_CODE (to_rtx
) == MEM
)
2824 /* When the offset is zero, to_rtx is the address of the
2825 structure we are storing into, and hence may be shared.
2826 We must make a new MEM before setting the volatile bit. */
2828 to_rtx
= copy_rtx (to_rtx
);
2830 MEM_VOLATILE_P (to_rtx
) = 1;
2832 #if 0 /* This was turned off because, when a field is volatile
2833 in an object which is not volatile, the object may be in a register,
2834 and then we would abort over here. */
2840 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
2842 /* Spurious cast makes HPUX compiler happy. */
2843 ? (enum machine_mode
) TYPE_MODE (TREE_TYPE (to
))
2846 /* Required alignment of containing datum. */
2848 int_size_in_bytes (TREE_TYPE (tem
)));
2849 preserve_temp_slots (result
);
2853 /* If the value is meaningful, convert RESULT to the proper mode.
2854 Otherwise, return nothing. */
2855 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
2856 TYPE_MODE (TREE_TYPE (from
)),
2858 TREE_UNSIGNED (TREE_TYPE (to
)))
2862 /* If the rhs is a function call and its value is not an aggregate,
2863 call the function before we start to compute the lhs.
2864 This is needed for correct code for cases such as
2865 val = setjmp (buf) on machines where reference to val
2866 requires loading up part of an address in a separate insn.
2868 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2869 a promoted variable where the zero- or sign- extension needs to be done.
2870 Handling this in the normal way is safe because no computation is done
2872 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
2873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
2874 && ! (TREE_CODE (to
) == VAR_DECL
&& GET_CODE (DECL_RTL (to
)) == REG
))
2879 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2881 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2883 /* Handle calls that return values in multiple non-contiguous locations.
2884 The Irix 6 ABI has examples of this. */
2885 if (GET_CODE (to_rtx
) == PARALLEL
)
2886 emit_group_load (to_rtx
, value
);
2887 else if (GET_MODE (to_rtx
) == BLKmode
)
2888 emit_block_move (to_rtx
, value
, expr_size (from
),
2889 TYPE_ALIGN (TREE_TYPE (from
)) / BITS_PER_UNIT
);
2891 emit_move_insn (to_rtx
, value
);
2892 preserve_temp_slots (to_rtx
);
2895 return want_value
? to_rtx
: NULL_RTX
;
2898 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2899 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2902 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, 0);
2904 /* Don't move directly into a return register. */
2905 if (TREE_CODE (to
) == RESULT_DECL
&& GET_CODE (to_rtx
) == REG
)
2910 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
2911 emit_move_insn (to_rtx
, temp
);
2912 preserve_temp_slots (to_rtx
);
2915 return want_value
? to_rtx
: NULL_RTX
;
2918 /* In case we are returning the contents of an object which overlaps
2919 the place the value is being stored, use a safe function when copying
2920 a value through a pointer into a structure value return block. */
2921 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
2922 && current_function_returns_struct
2923 && !current_function_returns_pcc_struct
)
2928 size
= expr_size (from
);
2929 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
2931 #ifdef TARGET_MEM_FUNCTIONS
2932 emit_library_call (memcpy_libfunc
, 0,
2933 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
2934 XEXP (from_rtx
, 0), Pmode
,
2935 convert_to_mode (TYPE_MODE (sizetype
),
2936 size
, TREE_UNSIGNED (sizetype
)),
2937 TYPE_MODE (sizetype
));
2939 emit_library_call (bcopy_libfunc
, 0,
2940 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
2941 XEXP (to_rtx
, 0), Pmode
,
2942 convert_to_mode (TYPE_MODE (integer_type_node
),
2943 size
, TREE_UNSIGNED (integer_type_node
)),
2944 TYPE_MODE (integer_type_node
));
2947 preserve_temp_slots (to_rtx
);
2950 return want_value
? to_rtx
: NULL_RTX
;
2953 /* Compute FROM and store the value in the rtx we got. */
2956 result
= store_expr (from
, to_rtx
, want_value
);
2957 preserve_temp_slots (result
);
2960 return want_value
? result
: NULL_RTX
;
/* NOTE(review): line-mangled extraction of store_expr() from GCC expr.c.
   Logical lines are split across physical lines; the leading integers are
   original-file line numbers fused into the text, and gaps in them
   (e.g. 2989 -> 2993, 3115 -> 3119) show interior lines are MISSING.
   Not compilable as-is -- TODO restore from a pristine copy.
   Only comments are added here; every code token is unchanged.  */
2963 /* Generate code for computing expression EXP,
2964 and storing the value into TARGET.
2965 TARGET may contain a QUEUED rtx.
2967 If WANT_VALUE is nonzero, return a copy of the value
2968 not in TARGET, so that we can be sure to use the proper
2969 value in a containing expression even if TARGET has something
2970 else stored in it. If possible, we copy the value through a pseudo
2971 and return that pseudo. Or, if the value is constant, we try to
2972 return the constant. In some cases, we return a pseudo
2973 copied *from* TARGET.
2975 If the mode is BLKmode then we may return TARGET itself.
2976 It turns out that in BLKmode it doesn't cause a problem.
2977 because C has no operators that could combine two different
2978 assignments into the same BLKmode object with different values
2979 with no sequence point. Will other languages need this to
2982 If WANT_VALUE is 0, we return NULL, to make sure
2983 to catch quickly any cases where the caller uses the value
2984 and fails to set WANT_VALUE. */
2987 store_expr (exp
, target
, want_value
)
2989 register rtx target
;
2993 int dont_return_target
= 0;
2995 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
2997 /* Perform first part of compound expression, then assign from second
2999 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3001 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3003 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3005 /* For conditional expression, get safe form of the target. Then
3006 test the condition, doing the appropriate assignment on either
3007 side. This avoids the creation of unnecessary temporaries.
3008 For non-BLKmode, it is more efficient not to do this. */
3010 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3013 target
= protect_from_queue (target
, 1);
3015 do_pending_stack_adjust ();
3017 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3018 start_cleanup_deferal ();
3019 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3020 end_cleanup_deferal ();
3022 emit_jump_insn (gen_jump (lab2
));
3025 start_cleanup_deferal ();
3026 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3027 end_cleanup_deferal ();
3032 return want_value
? target
: NULL_RTX
;
3034 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3035 && GET_MODE (target
) != BLKmode
)
3036 /* If target is in memory and caller wants value in a register instead,
3037 arrange that. Pass TARGET as target for expand_expr so that,
3038 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3039 We know expand_expr will not use the target in that case.
3040 Don't do this if TARGET is volatile because we are supposed
3041 to write it and then read it. */
3043 temp
= expand_expr (exp
, cse_not_expected
? NULL_RTX
: target
,
3044 GET_MODE (target
), 0);
3045 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3046 temp
= copy_to_reg (temp
);
3047 dont_return_target
= 1;
3049 else if (queued_subexp_p (target
))
3050 /* If target contains a postincrement, let's not risk
3051 using it as the place to generate the rhs. */
3053 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3055 /* Expand EXP into a new pseudo. */
3056 temp
= gen_reg_rtx (GET_MODE (target
));
3057 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3060 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3062 /* If target is volatile, ANSI requires accessing the value
3063 *from* the target, if it is accessed. So make that happen.
3064 In no case return the target itself. */
3065 if (! MEM_VOLATILE_P (target
) && want_value
)
3066 dont_return_target
= 1;
3068 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3069 /* If this is an scalar in a register that is stored in a wider mode
3070 than the declared mode, compute the result into its declared mode
3071 and then convert to the wider mode. Our value is the computed
3074 /* If we don't want a value, we can do the conversion inside EXP,
3075 which will often result in some optimizations. Do the conversion
3076 in two steps: first change the signedness, if needed, then
3077 the extend. But don't do this if the type of EXP is a subtype
3078 of something else since then the conversion might involve
3079 more than just converting modes. */
3080 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
3081 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
3083 if (TREE_UNSIGNED (TREE_TYPE (exp
))
3084 != SUBREG_PROMOTED_UNSIGNED_P (target
))
3087 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
3091 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
3092 SUBREG_PROMOTED_UNSIGNED_P (target
)),
3096 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3098 /* If TEMP is a volatile MEM and we want a result value, make
3099 the access now so it gets done only once. Likewise if
3100 it contains TARGET. */
3101 if (GET_CODE (temp
) == MEM
&& want_value
3102 && (MEM_VOLATILE_P (temp
)
3103 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
3104 temp
= copy_to_reg (temp
);
3106 /* If TEMP is a VOIDmode constant, use convert_modes to make
3107 sure that we properly convert it. */
3108 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3109 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3110 TYPE_MODE (TREE_TYPE (exp
)), temp
,
3111 SUBREG_PROMOTED_UNSIGNED_P (target
));
3113 convert_move (SUBREG_REG (target
), temp
,
3114 SUBREG_PROMOTED_UNSIGNED_P (target
));
3115 return want_value
? temp
: NULL_RTX
;
/* Fall-through case: expand EXP directly into TARGET.  */
3119 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3120 /* Return TARGET if it's a specified hardware register.
3121 If TARGET is a volatile mem ref, either return TARGET
3122 or return a reg copied *from* TARGET; ANSI requires this.
3124 Otherwise, if TEMP is not TARGET, return TEMP
3125 if it is constant (for efficiency),
3126 or if we really want the correct value. */
3127 if (!(target
&& GET_CODE (target
) == REG
3128 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3129 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
3130 && ! rtx_equal_p (temp
, target
)
3131 && (CONSTANT_P (temp
) || want_value
))
3132 dont_return_target
= 1;
3135 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3136 the same as that of TARGET, adjust the constant. This is needed, for
3137 example, in case it is a CONST_DOUBLE and we want only a word-sized
3139 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
3140 && TREE_CODE (exp
) != ERROR_MARK
3141 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3142 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
3143 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
3145 /* If value was not generated in the target, store it there.
3146 Convert the value to TARGET's type first if nec. */
3148 if (! rtx_equal_p (temp
, target
) && TREE_CODE (exp
) != ERROR_MARK
)
3150 target
= protect_from_queue (target
, 1);
3151 if (GET_MODE (temp
) != GET_MODE (target
)
3152 && GET_MODE (temp
) != VOIDmode
)
3154 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
3155 if (dont_return_target
)
3157 /* In this case, we will return TEMP,
3158 so make sure it has the proper mode.
3159 But don't forget to store the value into TARGET. */
3160 temp
= convert_to_mode (GET_MODE (target
), temp
, unsignedp
);
3161 emit_move_insn (target
, temp
);
3164 convert_move (target
, temp
, unsignedp
);
3167 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
3169 /* Handle copying a string constant into an array.
3170 The string constant may be shorter than the array.
3171 So copy just the string's actual length, and clear the rest. */
3175 /* Get the size of the data type of the string,
3176 which is actually the size of the target. */
3177 size
= expr_size (exp
);
3178 if (GET_CODE (size
) == CONST_INT
3179 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
3180 emit_block_move (target
, temp
, size
,
3181 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3184 /* Compute the size of the data to copy from the string. */
3186 = size_binop (MIN_EXPR
,
3187 make_tree (sizetype
, size
),
3189 build_int_2 (TREE_STRING_LENGTH (exp
), 0)));
3190 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
3194 /* Copy that much. */
3195 emit_block_move (target
, temp
, copy_size_rtx
,
3196 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3198 /* Figure out how much is left in TARGET that we have to clear.
3199 Do all calculations in ptr_mode. */
3201 addr
= XEXP (target
, 0);
3202 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
3204 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
3206 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
3207 size
= plus_constant (size
, - TREE_STRING_LENGTH (exp
));
3211 addr
= force_reg (ptr_mode
, addr
);
3212 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
3213 copy_size_rtx
, NULL_RTX
, 0,
3216 size
= expand_binop (ptr_mode
, sub_optab
, size
,
3217 copy_size_rtx
, NULL_RTX
, 0,
3220 emit_cmp_insn (size
, const0_rtx
, LT
, NULL_RTX
,
3221 GET_MODE (size
), 0, 0);
3222 label
= gen_label_rtx ();
3223 emit_jump_insn (gen_blt (label
));
3226 if (size
!= const0_rtx
)
3228 #ifdef TARGET_MEM_FUNCTIONS
3229 emit_library_call (memset_libfunc
, 0, VOIDmode
, 3,
3231 const0_rtx
, TYPE_MODE (integer_type_node
),
3232 convert_to_mode (TYPE_MODE (sizetype
),
3234 TREE_UNSIGNED (sizetype
)),
3235 TYPE_MODE (sizetype
));
3237 emit_library_call (bzero_libfunc
, 0, VOIDmode
, 2,
3239 convert_to_mode (TYPE_MODE (integer_type_node
),
3241 TREE_UNSIGNED (integer_type_node
)),
3242 TYPE_MODE (integer_type_node
));
3250 /* Handle calls that return values in multiple non-contiguous locations.
3251 The Irix 6 ABI has examples of this. */
3252 else if (GET_CODE (target
) == PARALLEL
)
3253 emit_group_load (target
, temp
);
3254 else if (GET_MODE (temp
) == BLKmode
)
3255 emit_block_move (target
, temp
, expr_size (exp
),
3256 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
3258 emit_move_insn (target
, temp
);
3261 /* If we don't want a value, return NULL_RTX. */
3265 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3266 ??? The latter test doesn't seem to make sense. */
3267 else if (dont_return_target
&& GET_CODE (temp
) != MEM
)
3270 /* Return TARGET itself if it is a hard register. */
3271 else if (want_value
&& GET_MODE (target
) != BLKmode
3272 && ! (GET_CODE (target
) == REG
3273 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
3274 return copy_to_reg (target
);
/* NOTE(review): body fragment of is_zeros_p() -- the function signature
   (original lines 3281-3287) is missing from this mangled extraction, and
   interior lines are also absent (gaps in the fused original line numbers).
   The visible switch dispatches on TREE_CODE and recurses over constructor
   elements/complex parts.  Only comments added; code tokens unchanged.  */
3280 /* Return 1 if EXP just contains zeros. */
3288 switch (TREE_CODE (exp
))
3292 case NON_LVALUE_EXPR
:
3293 return is_zeros_p (TREE_OPERAND (exp
, 0));
3296 return TREE_INT_CST_LOW (exp
) == 0 && TREE_INT_CST_HIGH (exp
) == 0;
3300 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
3303 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp
), dconst0
);
3306 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3307 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
3308 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3309 if (! is_zeros_p (TREE_VALUE (elt
)))
/* NOTE(review): line-mangled extraction of mostly_zeros_p().  Gaps in the
   fused original line numbers (e.g. 3340 -> 3345) show interior lines --
   presumably the zero/element counters updated inside the loop -- are
   missing; TODO restore from a pristine copy.  Only comments added.  */
3318 /* Return 1 if EXP contains mostly (3/4) zeros. */
3321 mostly_zeros_p (exp
)
3324 if (TREE_CODE (exp
) == CONSTRUCTOR
)
3326 int elts
= 0, zeros
= 0;
3327 tree elt
= CONSTRUCTOR_ELTS (exp
);
3328 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
3330 /* If there are no ranges of true bits, it is all zero. */
3331 return elt
== NULL_TREE
;
3333 for (; elt
; elt
= TREE_CHAIN (elt
))
3335 /* We do not handle the case where the index is a RANGE_EXPR,
3336 so the statistic will be somewhat inaccurate.
3337 We do make a more accurate count in store_constructor itself,
3338 so since this function is only used for nested array elements,
3339 this should be close enough. */
3340 if (mostly_zeros_p (TREE_VALUE (elt
)))
/* At least 3/4 of the elements must be (mostly) zero.  */
3345 return 4 * zeros
>= 3 * elts
;
3348 return is_zeros_p (exp
);
/* NOTE(review): line-mangled extraction of store_constructor_field().
   Interior lines are missing (gaps in the fused original line numbers,
   e.g. 3366 -> 3370); not compilable as-is.  Visible logic: a nested
   CONSTRUCTOR at a byte-aligned position in a MEM (or at bitpos 0) is
   stored by recursing into store_constructor so the CLEARED flag can
   propagate; otherwise store_field does the (bitfield) store.
   Only comments added; code tokens unchanged.  */
3351 /* Helper function for store_constructor.
3352 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3353 TYPE is the type of the CONSTRUCTOR, not the element type.
3354 CLEARED is as for store_constructor.
3356 This provides a recursive shortcut back to store_constructor when it isn't
3357 necessary to go through store_field. This is so that we can pass through
3358 the cleared field to let store_constructor know that we may not have to
3359 clear a substructure if the outer structure has already been cleared. */
3362 store_constructor_field (target
, bitsize
, bitpos
,
3363 mode
, exp
, type
, cleared
)
3365 int bitsize
, bitpos
;
3366 enum machine_mode mode
;
3370 if (TREE_CODE (exp
) == CONSTRUCTOR
3371 && bitpos
% BITS_PER_UNIT
== 0
3372 /* If we have a non-zero bitpos for a register target, then we just
3373 let store_field do the bitfield handling. This is unlikely to
3374 generate unnecessary clear instructions anyways. */
3375 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
3378 target
= change_address (target
, VOIDmode
,
3379 plus_constant (XEXP (target
, 0),
3380 bitpos
/ BITS_PER_UNIT
));
3381 store_constructor (exp
, target
, cleared
);
3384 store_field (target
, bitsize
, bitpos
, mode
, exp
,
3385 VOIDmode
, 0, TYPE_ALIGN (type
) / BITS_PER_UNIT
,
3386 int_size_in_bytes (type
));
3389 /* Store the value of constructor EXP into the rtx TARGET.
3390 TARGET is either a REG or a MEM.
3391 CLEARED is true if TARGET is known to have been zero'd. */
3394 store_constructor (exp
, target
, cleared
)
3399 tree type
= TREE_TYPE (exp
);
3401 /* We know our target cannot conflict, since safe_from_p has been called. */
3403 /* Don't try copying piece by piece into a hard register
3404 since that is vulnerable to being clobbered by EXP.
3405 Instead, construct in a pseudo register and then copy it all. */
3406 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3408 rtx temp
= gen_reg_rtx (GET_MODE (target
));
3409 store_constructor (exp
, temp
, 0);
3410 emit_move_insn (target
, temp
);
3415 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
3416 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3420 /* Inform later passes that the whole union value is dead. */
3421 if (TREE_CODE (type
) == UNION_TYPE
3422 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3423 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3425 /* If we are building a static constructor into a register,
3426 set the initial value as zero so we can fold the value into
3427 a constant. But if more than one register is involved,
3428 this probably loses. */
3429 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
3430 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
3433 emit_move_insn (target
, const0_rtx
);
3438 /* If the constructor has fewer fields than the structure
3439 or if we are initializing the structure to mostly zeros,
3440 clear the whole structure first. */
3441 else if ((list_length (CONSTRUCTOR_ELTS (exp
))
3442 != list_length (TYPE_FIELDS (type
)))
3443 || mostly_zeros_p (exp
))
3446 clear_storage (target
, expr_size (exp
),
3447 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3452 /* Inform later passes that the old value is dead. */
3453 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3455 /* Store each element of the constructor into
3456 the corresponding field of TARGET. */
3458 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
3460 register tree field
= TREE_PURPOSE (elt
);
3461 register enum machine_mode mode
;
3465 tree pos
, constant
= 0, offset
= 0;
3466 rtx to_rtx
= target
;
3468 /* Just ignore missing fields.
3469 We cleared the whole structure, above,
3470 if any fields are missing. */
3474 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
3477 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
3478 unsignedp
= TREE_UNSIGNED (field
);
3479 mode
= DECL_MODE (field
);
3480 if (DECL_BIT_FIELD (field
))
3483 pos
= DECL_FIELD_BITPOS (field
);
3484 if (TREE_CODE (pos
) == INTEGER_CST
)
3486 else if (TREE_CODE (pos
) == PLUS_EXPR
3487 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
3488 constant
= TREE_OPERAND (pos
, 1), offset
= TREE_OPERAND (pos
, 0);
3493 bitpos
= TREE_INT_CST_LOW (constant
);
3499 if (contains_placeholder_p (offset
))
3500 offset
= build (WITH_RECORD_EXPR
, sizetype
,
3503 offset
= size_binop (FLOOR_DIV_EXPR
, offset
,
3504 size_int (BITS_PER_UNIT
));
3506 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3507 if (GET_CODE (to_rtx
) != MEM
)
3511 = change_address (to_rtx
, VOIDmode
,
3512 gen_rtx (PLUS
, ptr_mode
, XEXP (to_rtx
, 0),
3513 force_reg (ptr_mode
, offset_rtx
)));
3515 if (TREE_READONLY (field
))
3517 if (GET_CODE (to_rtx
) == MEM
)
3518 to_rtx
= copy_rtx (to_rtx
);
3520 RTX_UNCHANGING_P (to_rtx
) = 1;
3523 store_constructor_field (to_rtx
, bitsize
, bitpos
,
3524 mode
, TREE_VALUE (elt
), type
, cleared
);
3527 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3532 tree domain
= TYPE_DOMAIN (type
);
3533 HOST_WIDE_INT minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
3534 HOST_WIDE_INT maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
3535 tree elttype
= TREE_TYPE (type
);
3537 /* If the constructor has fewer elements than the array,
3538 clear the whole array first. Similarly if this this is
3539 static constructor of a non-BLKmode object. */
3540 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
3544 HOST_WIDE_INT count
= 0, zero_count
= 0;
3546 /* This loop is a more accurate version of the loop in
3547 mostly_zeros_p (it handles RANGE_EXPR in an index).
3548 It is also needed to check for missing elements. */
3549 for (elt
= CONSTRUCTOR_ELTS (exp
);
3551 elt
= TREE_CHAIN (elt
))
3553 tree index
= TREE_PURPOSE (elt
);
3554 HOST_WIDE_INT this_node_count
;
3555 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3557 tree lo_index
= TREE_OPERAND (index
, 0);
3558 tree hi_index
= TREE_OPERAND (index
, 1);
3559 if (TREE_CODE (lo_index
) != INTEGER_CST
3560 || TREE_CODE (hi_index
) != INTEGER_CST
)
3565 this_node_count
= TREE_INT_CST_LOW (hi_index
)
3566 - TREE_INT_CST_LOW (lo_index
) + 1;
3569 this_node_count
= 1;
3570 count
+= this_node_count
;
3571 if (mostly_zeros_p (TREE_VALUE (elt
)))
3572 zero_count
+= this_node_count
;
3574 /* Clear the entire array first if there are any missing elements,
3575 or if the incidence of zero elements is >= 75%. */
3576 if (count
< maxelt
- minelt
+ 1
3577 || 4 * zero_count
>= 3 * count
)
3583 clear_storage (target
, expr_size (exp
),
3584 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3588 /* Inform later passes that the old value is dead. */
3589 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, target
));
3591 /* Store each element of the constructor into
3592 the corresponding element of TARGET, determined
3593 by counting the elements. */
3594 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
3596 elt
= TREE_CHAIN (elt
), i
++)
3598 register enum machine_mode mode
;
3602 tree value
= TREE_VALUE (elt
);
3603 tree index
= TREE_PURPOSE (elt
);
3604 rtx xtarget
= target
;
3606 if (cleared
&& is_zeros_p (value
))
3609 mode
= TYPE_MODE (elttype
);
3610 bitsize
= GET_MODE_BITSIZE (mode
);
3611 unsignedp
= TREE_UNSIGNED (elttype
);
3613 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
3615 tree lo_index
= TREE_OPERAND (index
, 0);
3616 tree hi_index
= TREE_OPERAND (index
, 1);
3617 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
3618 struct nesting
*loop
;
3619 HOST_WIDE_INT lo
, hi
, count
;
3622 /* If the range is constant and "small", unroll the loop. */
3623 if (TREE_CODE (lo_index
) == INTEGER_CST
3624 && TREE_CODE (hi_index
) == INTEGER_CST
3625 && (lo
= TREE_INT_CST_LOW (lo_index
),
3626 hi
= TREE_INT_CST_LOW (hi_index
),
3627 count
= hi
- lo
+ 1,
3628 (GET_CODE (target
) != MEM
3630 || (TREE_CODE (TYPE_SIZE (elttype
)) == INTEGER_CST
3631 && TREE_INT_CST_LOW (TYPE_SIZE (elttype
)) * count
3634 lo
-= minelt
; hi
-= minelt
;
3635 for (; lo
<= hi
; lo
++)
3637 bitpos
= lo
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
));
3638 store_constructor_field (target
, bitsize
, bitpos
,
3639 mode
, value
, type
, cleared
);
3644 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
3645 loop_top
= gen_label_rtx ();
3646 loop_end
= gen_label_rtx ();
3648 unsignedp
= TREE_UNSIGNED (domain
);
3650 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
3652 DECL_RTL (index
) = index_r
3653 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
3656 if (TREE_CODE (value
) == SAVE_EXPR
3657 && SAVE_EXPR_RTL (value
) == 0)
3659 /* Make sure value gets expanded once before the
3661 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
3664 store_expr (lo_index
, index_r
, 0);
3665 loop
= expand_start_loop (0);
3667 /* Assign value to element index. */
3668 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3669 size_int (BITS_PER_UNIT
));
3670 position
= size_binop (MULT_EXPR
,
3671 size_binop (MINUS_EXPR
, index
,
3672 TYPE_MIN_VALUE (domain
)),
3674 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3675 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3676 xtarget
= change_address (target
, mode
, addr
);
3677 if (TREE_CODE (value
) == CONSTRUCTOR
)
3678 store_constructor (value
, xtarget
, cleared
);
3680 store_expr (value
, xtarget
, 0);
3682 expand_exit_loop_if_false (loop
,
3683 build (LT_EXPR
, integer_type_node
,
3686 expand_increment (build (PREINCREMENT_EXPR
,
3688 index
, integer_one_node
), 0, 0);
3690 emit_label (loop_end
);
3692 /* Needed by stupid register allocation, to extend the
3693 lifetime of pseudo-regs used by target past the end
3695 emit_insn (gen_rtx (USE
, GET_MODE (target
), target
));
3698 else if ((index
!= 0 && TREE_CODE (index
) != INTEGER_CST
)
3699 || TREE_CODE (TYPE_SIZE (elttype
)) != INTEGER_CST
)
3705 index
= size_int (i
);
3708 index
= size_binop (MINUS_EXPR
, index
,
3709 TYPE_MIN_VALUE (domain
));
3710 position
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE (elttype
),
3711 size_int (BITS_PER_UNIT
));
3712 position
= size_binop (MULT_EXPR
, index
, position
);
3713 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
3714 addr
= gen_rtx (PLUS
, Pmode
, XEXP (target
, 0), pos_rtx
);
3715 xtarget
= change_address (target
, mode
, addr
);
3716 store_expr (value
, xtarget
, 0);
3721 bitpos
= ((TREE_INT_CST_LOW (index
) - minelt
)
3722 * TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3724 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
)));
3725 store_constructor_field (target
, bitsize
, bitpos
,
3726 mode
, value
, type
, cleared
);
3730 /* set constructor assignments */
3731 else if (TREE_CODE (type
) == SET_TYPE
)
3733 tree elt
= CONSTRUCTOR_ELTS (exp
);
3734 rtx xtarget
= XEXP (target
, 0);
3735 int set_word_size
= TYPE_ALIGN (type
);
3736 int nbytes
= int_size_in_bytes (type
), nbits
;
3737 tree domain
= TYPE_DOMAIN (type
);
3738 tree domain_min
, domain_max
, bitlength
;
3740 /* The default implementation strategy is to extract the constant
3741 parts of the constructor, use that to initialize the target,
3742 and then "or" in whatever non-constant ranges we need in addition.
3744 If a large set is all zero or all ones, it is
3745 probably better to set it using memset (if available) or bzero.
3746 Also, if a large set has just a single range, it may also be
3747 better to first clear the whole set (using
3748 bzero/memset), and then set the bits we want. */
3750 /* Check for all zeros. */
3751 if (elt
== NULL_TREE
)
3754 clear_storage (target
, expr_size (exp
),
3755 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3759 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
3760 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
3761 bitlength
= size_binop (PLUS_EXPR
,
3762 size_binop (MINUS_EXPR
, domain_max
, domain_min
),
3765 if (nbytes
< 0 || TREE_CODE (bitlength
) != INTEGER_CST
)
3767 nbits
= TREE_INT_CST_LOW (bitlength
);
3769 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3770 are "complicated" (more than one range), initialize (the
3771 constant parts) by copying from a constant. */
3772 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
3773 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
3775 int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
3776 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
3777 char *bit_buffer
= (char *) alloca (nbits
);
3778 HOST_WIDE_INT word
= 0;
3781 int offset
= 0; /* In bytes from beginning of set. */
3782 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
3785 if (bit_buffer
[ibit
])
3787 if (BYTES_BIG_ENDIAN
)
3788 word
|= (1 << (set_word_size
- 1 - bit_pos
));
3790 word
|= 1 << bit_pos
;
3793 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
3795 if (word
!= 0 || ! cleared
)
3797 rtx datum
= GEN_INT (word
);
3799 /* The assumption here is that it is safe to use
3800 XEXP if the set is multi-word, but not if
3801 it's single-word. */
3802 if (GET_CODE (target
) == MEM
)
3804 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
3805 to_rtx
= change_address (target
, mode
, to_rtx
);
3807 else if (offset
== 0)
3811 emit_move_insn (to_rtx
, datum
);
3817 offset
+= set_word_size
/ BITS_PER_UNIT
;
3823 /* Don't bother clearing storage if the set is all ones. */
3824 if (TREE_CHAIN (elt
) != NULL_TREE
3825 || (TREE_PURPOSE (elt
) == NULL_TREE
3827 : (TREE_CODE (TREE_VALUE (elt
)) != INTEGER_CST
3828 || TREE_CODE (TREE_PURPOSE (elt
)) != INTEGER_CST
3829 || (TREE_INT_CST_LOW (TREE_VALUE (elt
))
3830 - TREE_INT_CST_LOW (TREE_PURPOSE (elt
)) + 1
3832 clear_storage (target
, expr_size (exp
),
3833 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
3836 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
3838 /* start of range of element or NULL */
3839 tree startbit
= TREE_PURPOSE (elt
);
3840 /* end of range of element, or element value */
3841 tree endbit
= TREE_VALUE (elt
);
3842 HOST_WIDE_INT startb
, endb
;
3843 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
3845 bitlength_rtx
= expand_expr (bitlength
,
3846 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
3848 /* handle non-range tuple element like [ expr ] */
3849 if (startbit
== NULL_TREE
)
3851 startbit
= save_expr (endbit
);
3854 startbit
= convert (sizetype
, startbit
);
3855 endbit
= convert (sizetype
, endbit
);
3856 if (! integer_zerop (domain_min
))
3858 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
3859 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
3861 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
3862 EXPAND_CONST_ADDRESS
);
3863 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
3864 EXPAND_CONST_ADDRESS
);
3868 targetx
= assign_stack_temp (GET_MODE (target
),
3869 GET_MODE_SIZE (GET_MODE (target
)),
3871 emit_move_insn (targetx
, target
);
3873 else if (GET_CODE (target
) == MEM
)
3878 #ifdef TARGET_MEM_FUNCTIONS
3879 /* Optimization: If startbit and endbit are
3880 constants divisible by BITS_PER_UNIT,
3881 call memset instead. */
3882 if (TREE_CODE (startbit
) == INTEGER_CST
3883 && TREE_CODE (endbit
) == INTEGER_CST
3884 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
3885 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
3887 emit_library_call (memset_libfunc
, 0,
3889 plus_constant (XEXP (targetx
, 0),
3890 startb
/ BITS_PER_UNIT
),
3892 constm1_rtx
, TYPE_MODE (integer_type_node
),
3893 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
3894 TYPE_MODE (sizetype
));
3899 emit_library_call (gen_rtx (SYMBOL_REF
, Pmode
, "__setbits"),
3900 0, VOIDmode
, 4, XEXP (targetx
, 0), Pmode
,
3901 bitlength_rtx
, TYPE_MODE (sizetype
),
3902 startbit_rtx
, TYPE_MODE (sizetype
),
3903 endbit_rtx
, TYPE_MODE (sizetype
));
3906 emit_move_insn (target
, targetx
);
3914 /* Store the value of EXP (an expression tree)
3915 into a subfield of TARGET which has mode MODE and occupies
3916 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3917 If MODE is VOIDmode, it means that we are storing into a bit-field.
3919 If VALUE_MODE is VOIDmode, return nothing in particular.
3920 UNSIGNEDP is not used in this case.
3922 Otherwise, return an rtx for the value stored. This rtx
3923 has mode VALUE_MODE if that is convenient to do.
3924 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3926 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3927 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* NOTE(review): this region was damaged during extraction -- the
   original source line numbers are fused into the text, statements are
   split across physical lines, and some lines (return type, braces,
   parts of conditions) are missing entirely.  The code below is kept
   byte-identical to what survived; only comments were added.  Verify
   every claim against a pristine copy of expr.c.  */
/* store_field: store EXP into the field of TARGET spanning BITSIZE bits
   at bit offset BITPOS, using machine mode MODE (VOIDmode means a
   bit-field store).  If VALUE_MODE is not VOIDmode, an rtx for the
   stored value is returned in that mode, with UNSIGNEDP giving its
   signedness.  ALIGN is TARGET's known alignment in bytes; TOTAL_SIZE
   is the structure size in bytes, or -1 if varying (per the comment
   block preceding this function).  */
3930 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
3931 unsignedp
, align
, total_size
)
3933 int bitsize
, bitpos
;
3934 enum machine_mode mode
;
3936 enum machine_mode value_mode
;
/* Mask of the BITSIZE low-order bits; used below (line 4049) to
   truncate the value returned to the caller.  Left 0 when BITSIZE
   fills a whole host word, so the shift below never overflows.  */
3941 HOST_WIDE_INT width_mask
= 0;
3943 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
3944 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
3946 /* If we are storing into an unaligned field of an aligned union that is
3947 in a register, we may have the mode of TARGET being an integer mode but
3948 MODE == BLKmode. In that case, get an aligned object whose size and
3949 alignment are the same as TARGET and store TARGET into it (we can avoid
3950 the store if the field being stored is the entire width of TARGET). Then
3951 call ourselves recursively to store the field into a BLKmode version of
3952 that object. Finally, load from the object into TARGET. This is not
3953 very efficient in general, but should only be slightly more expensive
3954 than the otherwise-required unaligned accesses. Perhaps this can be
3955 cleaned up later. */
/* NOTE(review): the start of this condition (line 3957 of the original)
   was lost in extraction.  */
3958 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
3960 rtx object
= assign_stack_temp (GET_MODE (target
),
3961 GET_MODE_SIZE (GET_MODE (target
)), 0);
/* blk_object shares object's storage but is viewed in BLKmode.  */
3962 rtx blk_object
= copy_rtx (object
);
3964 MEM_IN_STRUCT_P (object
) = 1;
3965 MEM_IN_STRUCT_P (blk_object
) = 1;
3966 PUT_MODE (blk_object
, BLKmode
);
/* Skip the initial copy when the field covers all of TARGET.  */
3968 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
3969 emit_move_insn (object
, target
);
3971 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
3974 /* Even though we aren't returning target, we need to
3975 give it the updated value. */
3976 emit_move_insn (target
, object
);
3981 /* If the structure is in a register or if the component
3982 is a bit field, we cannot use addressing to access it.
3983 Use bit-field techniques or SUBREG to store in it. */
3985 if (mode
== VOIDmode
3986 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
3987 || GET_CODE (target
) == REG
3988 || GET_CODE (target
) == SUBREG
3989 /* If the field isn't aligned enough to store as an ordinary memref,
3990 store it as a bit field. */
3991 || (SLOW_UNALIGNED_ACCESS
3992 && align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
))
3993 || (SLOW_UNALIGNED_ACCESS
&& bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
3995 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
3997 /* If BITSIZE is narrower than the size of the type of EXP
3998 we will be narrowing TEMP. Normally, what's wanted are the
3999 low-order bits. However, if EXP's type is a record and this is
4000 big-endian machine, we want the upper BITSIZE bits. */
4001 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
4002 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
4003 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
4004 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
4005 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
4009 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4011 if (mode
!= VOIDmode
&& mode
!= BLKmode
4012 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
4013 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
4015 /* If the modes of TARGET and TEMP are both BLKmode, both
4016 must be in memory and BITPOS must be aligned on a byte
4017 boundary. If so, we simply do a block copy. */
4018 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
4020 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
4021 || bitpos
% BITS_PER_UNIT
!= 0)
4024 target
= change_address (target
, VOIDmode
,
4025 plus_constant (XEXP (target
, 0),
4026 bitpos
/ BITS_PER_UNIT
));
4028 emit_block_move (target
, temp
,
4029 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
4033 return value_mode
== VOIDmode
? const0_rtx
: target
;
4036 /* Store the value in the bitfield. */
4037 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
4038 if (value_mode
!= VOIDmode
)
4040 /* The caller wants an rtx for the value. */
4041 /* If possible, avoid refetching from the bitfield itself. */
4043 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
4046 enum machine_mode tmode
;
/* Unsigned case: mask TEMP down to BITSIZE bits and return it.  */
4049 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
/* Signed case: sign-extend by shifting left then arithmetic right.  */
4050 tmode
= GET_MODE (temp
);
4051 if (tmode
== VOIDmode
)
4053 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
4054 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
4055 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
/* Otherwise re-fetch the value from the field just stored.  */
4057 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
4058 NULL_RTX
, value_mode
, 0, align
,
/* Ordinary addressable-memory case: build a MEM for just the
   component and store through it.  NOTE(review): the `else` and
   surrounding braces were lost in extraction.  */
4065 rtx addr
= XEXP (target
, 0);
4068 /* If a value is wanted, it must be the lhs;
4069 so make the address stable for multiple use. */
4071 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
4072 && ! CONSTANT_ADDRESS_P (addr
)
4073 /* A frame-pointer reference is already stable. */
4074 && ! (GET_CODE (addr
) == PLUS
4075 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
4076 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
4077 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
4078 addr
= copy_to_reg (addr
);
4080 /* Now build a reference to just the desired component. */
4082 to_rtx
= copy_rtx (change_address (target
, mode
,
4083 plus_constant (addr
,
4085 / BITS_PER_UNIT
))));
4086 MEM_IN_STRUCT_P (to_rtx
) = 1;
4088 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
4092 /* Return true if any object containing the innermost array is an unaligned
4093 packed structure field. */
/* NOTE(review): damaged extraction -- line numbers fused into text and
   some lines missing (return type, braces, loop header, return
   statements).  Code kept byte-identical; comments only.  */
/* get_inner_unaligned_p: walk inward from EXP through COMPONENT_REF /
   BIT_FIELD_REF / ARRAY_REF / no-op conversions, checking whether any
   containing object is less aligned than EXP's own type requires
   (per the comment block preceding this function).  */
4096 get_inner_unaligned_p (exp
)
/* Alignment EXP's type wants; compared against each container's.  */
4099 int needed_alignment
= TYPE_ALIGN (TREE_TYPE (exp
));
4103 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4105 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
/* Stop walking on anything that is not an array ref, a no-op
   NON_LVALUE_EXPR, or a mode-preserving NOP/CONVERT.  */
4109 else if (TREE_CODE (exp
) != ARRAY_REF
4110 && TREE_CODE (exp
) != NON_LVALUE_EXPR
4111 && ! ((TREE_CODE (exp
) == NOP_EXPR
4112 || TREE_CODE (exp
) == CONVERT_EXPR
)
4113 && (TYPE_MODE (TREE_TYPE (exp
))
4114 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
/* Descend one level toward the containing object.  */
4117 exp
= TREE_OPERAND (exp
, 0);
4123 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4124 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4125 ARRAY_REFs and find the ultimate containing object, which we return.
4127 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4128 bit position, and *PUNSIGNEDP to the signedness of the field.
4129 If the position of the field is variable, we store a tree
4130 giving the variable offset (in units) in *POFFSET.
4131 This offset is in addition to the bit position.
4132 If the position is not variable, we store 0 in *POFFSET.
4133 We set *PALIGNMENT to the alignment in bytes of the address that will be
4134 computed. This is the alignment of the thing we return if *POFFSET
4135 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4137 If any of the extraction expressions is volatile,
4138 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4140 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4141 is a mode that can be used to access the field. In that case, *PBITSIZE
4144 If the field describes a variable-sized object, *PMODE is set to
4145 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4146 this case, but the address of the object can be found. */
/* NOTE(review): damaged extraction -- original line numbers fused into
   the text, statements split, and some lines missing (declarations,
   braces, the main loop header).  Code kept byte-identical; comments
   only.  Verify against a pristine expr.c.  */
/* get_inner_reference: peel nested COMPONENT_REF / BIT_FIELD_REF /
   ARRAY_REF nodes off EXP, returning the ultimate containing object
   and filling in *PBITSIZE, *PBITPOS, *POFFSET, *PMODE, *PUNSIGNEDP,
   *PVOLATILEP and *PALIGNMENT as described in the comment block
   preceding this function.  */
4149 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
4150 punsignedp
, pvolatilep
, palignment
)
4155 enum machine_mode
*pmode
;
4160 tree orig_exp
= exp
;
4162 enum machine_mode mode
= VOIDmode
;
/* Accumulated variable part of the offset, in units (bytes).  */
4163 tree offset
= integer_zero_node
;
4164 int alignment
= BIGGEST_ALIGNMENT
;
/* First determine the size/mode/signedness of the outermost ref.  */
4166 if (TREE_CODE (exp
) == COMPONENT_REF
)
4168 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
4169 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
4170 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
4171 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
4173 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
4175 size_tree
= TREE_OPERAND (exp
, 1);
4176 *punsignedp
= TREE_UNSIGNED (exp
);
4180 mode
= TYPE_MODE (TREE_TYPE (exp
));
4181 *pbitsize
= GET_MODE_BITSIZE (mode
);
4182 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
/* Variable-sized field: report BLKmode and bitsize -1.  */
4187 if (TREE_CODE (size_tree
) != INTEGER_CST
)
4188 mode
= BLKmode
, *pbitsize
= -1;
4190 *pbitsize
= TREE_INT_CST_LOW (size_tree
);
4193 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4194 and find the ultimate containing object. */
/* NOTE(review): the loop header enclosing the cases below (lines
   4195-4199 of the original) was lost in extraction.  */
4200 if (TREE_CODE (exp
) == COMPONENT_REF
|| TREE_CODE (exp
) == BIT_FIELD_REF
)
4202 tree pos
= (TREE_CODE (exp
) == COMPONENT_REF
4203 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 1))
4204 : TREE_OPERAND (exp
, 2));
4205 tree constant
= integer_zero_node
, var
= pos
;
4207 /* If this field hasn't been filled in yet, don't go
4208 past it. This should only happen when folding expressions
4209 made during type construction. */
4213 /* Assume here that the offset is a multiple of a unit.
4214 If not, there should be an explicitly added constant. */
4215 if (TREE_CODE (pos
) == PLUS_EXPR
4216 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4217 constant
= TREE_OPERAND (pos
, 1), var
= TREE_OPERAND (pos
, 0);
4218 else if (TREE_CODE (pos
) == INTEGER_CST
)
4219 constant
= pos
, var
= integer_zero_node
;
/* Constant part goes into *PBITPOS; variable part (converted from
   bits to units) accumulates into OFFSET.  */
4221 *pbitpos
+= TREE_INT_CST_LOW (constant
);
4222 offset
= size_binop (PLUS_EXPR
, offset
,
4223 size_binop (EXACT_DIV_EXPR
, var
,
4224 size_int (BITS_PER_UNIT
)));
4227 else if (TREE_CODE (exp
) == ARRAY_REF
)
4229 /* This code is based on the code in case ARRAY_REF in expand_expr
4230 below. We assume here that the size of an array element is
4231 always an integral multiple of BITS_PER_UNIT. */
4233 tree index
= TREE_OPERAND (exp
, 1);
4234 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4236 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4237 tree index_type
= TREE_TYPE (index
);
/* Bias the index by the array's lower bound, if any.  */
4239 if (! integer_zerop (low_bound
))
4240 index
= fold (build (MINUS_EXPR
, index_type
, index
, low_bound
));
4242 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4244 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4246 index_type
= TREE_TYPE (index
);
/* Scale the index by the element size (in bits).  */
4249 index
= fold (build (MULT_EXPR
, index_type
, index
,
4250 convert (index_type
,
4251 TYPE_SIZE (TREE_TYPE (exp
)))));
4253 if (TREE_CODE (index
) == INTEGER_CST
4254 && TREE_INT_CST_HIGH (index
) == 0)
4255 *pbitpos
+= TREE_INT_CST_LOW (index
);
4257 offset
= size_binop (PLUS_EXPR
, offset
,
4258 size_binop (FLOOR_DIV_EXPR
, index
,
4259 size_int (BITS_PER_UNIT
)));
/* Stop walking unless this is a no-op wrapper (NON_LVALUE_EXPR, or a
   mode-preserving NOP/CONVERT that isn't a union view-convert).  */
4261 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
4262 && ! ((TREE_CODE (exp
) == NOP_EXPR
4263 || TREE_CODE (exp
) == CONVERT_EXPR
)
4264 && ! (TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
4265 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4267 && (TYPE_MODE (TREE_TYPE (exp
))
4268 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4271 /* If any reference in the chain is volatile, the effect is volatile. */
4272 if (TREE_THIS_VOLATILE (exp
))
4275 /* If the offset is non-constant already, then we can't assume any
4276 alignment more than the alignment here. */
4277 if (! integer_zerop (offset
))
4278 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
4280 exp
= TREE_OPERAND (exp
, 0);
/* Alignment is further limited by the final object's declaration
   or type.  */
4283 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
4284 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
4285 else if (TREE_TYPE (exp
) != 0)
4286 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
4288 if (integer_zerop (offset
))
/* Wrap a placeholder-containing offset so it can be resolved against
   ORIG_EXP later.  */
4291 if (offset
!= 0 && contains_placeholder_p (offset
))
4292 offset
= build (WITH_RECORD_EXPR
, sizetype
, offset
, orig_exp
);
4296 *palignment
= alignment
/ BITS_PER_UNIT
;
4300 /* Given an rtx VALUE that may contain additions and multiplications,
4301 return an equivalent value that just refers to a register or memory.
4302 This is done by generating instructions to perform the arithmetic
4303 and returning a pseudo-register containing the value.
4305 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): damaged extraction -- line numbers fused into text,
   statements split, some lines (declarations, braces, fall-through
   code) missing.  Code kept byte-identical; comments only.  */
/* force_operand: reduce VALUE (which may contain PLUS/MINUS/MULT) to a
   REG, SUBREG, MEM or constant, emitting arithmetic insns as needed;
   TARGET is a suggestion for where to put the result (per the comment
   block preceding this function).  */
4308 force_operand (value
, target
)
4311 register optab binoptab
= 0;
4312 /* Use a temporary to force order of execution of calls to
4316 /* Use subtarget as the target for operand 0 of a binary operation. */
4317 register rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
/* Select the binary optab matching the outer rtx code; MULT is
   handled separately via expand_mult below.  */
4319 if (GET_CODE (value
) == PLUS
)
4320 binoptab
= add_optab
;
4321 else if (GET_CODE (value
) == MINUS
)
4322 binoptab
= sub_optab
;
4323 else if (GET_CODE (value
) == MULT
)
4325 op2
= XEXP (value
, 1);
4326 if (!CONSTANT_P (op2
)
4327 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
4329 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4330 return expand_mult (GET_MODE (value
), tmp
,
4331 force_operand (op2
, NULL_RTX
),
/* PLUS/MINUS path: force OP2 into operand form first.  */
4337 op2
= XEXP (value
, 1);
4338 if (!CONSTANT_P (op2
)
4339 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
/* Canonicalize subtraction of a constant into addition of its
   negation.  */
4341 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
4343 binoptab
= add_optab
;
4344 op2
= negate_rtx (GET_MODE (value
), op2
);
4347 /* Check for an addition with OP2 a constant integer and our first
4348 operand a PLUS of a virtual register and something else. In that
4349 case, we want to emit the sum of the virtual register and the
4350 constant first and then add the other value. This allows virtual
4351 register instantiation to simply modify the constant rather than
4352 creating another one around this addition. */
4353 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
4354 && GET_CODE (XEXP (value
, 0)) == PLUS
4355 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
4356 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4357 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
4359 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
4360 XEXP (XEXP (value
, 0), 0), op2
,
4361 subtarget
, 0, OPTAB_LIB_WIDEN
);
4362 return expand_binop (GET_MODE (value
), binoptab
, temp
,
4363 force_operand (XEXP (XEXP (value
, 0), 1), 0),
4364 target
, 0, OPTAB_LIB_WIDEN
);
/* General binary case: force operand 0, then expand the operation.  */
4367 tmp
= force_operand (XEXP (value
, 0), subtarget
);
4368 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
4369 force_operand (op2
, NULL_RTX
),
4370 target
, 0, OPTAB_LIB_WIDEN
);
4371 /* We give UNSIGNEDP = 0 to expand_binop
4372 because the only operations we are expanding here are signed ones. */
4377 /* Subroutine of expand_expr:
4378 save the non-copied parts (LIST) of an expr (LHS), and return a list
4379 which can restore these values to their previous values,
4380 should something modify their storage. */
/* NOTE(review): damaged extraction -- line numbers fused into text and
   some lines missing (declarations, braces, return).  Code kept
   byte-identical; comments only.  */
/* save_noncopied_parts: for each non-copied part of LHS named in LIST,
   save its current value into a stack temporary and build a list of
   (COMPONENT_REF, RTL_EXPR) pairs that can restore those values later
   (per the comment block preceding this function).  LIST entries that
   are themselves TREE_LISTs are handled recursively.  */
4383 save_noncopied_parts (lhs
, list
)
4390 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4391 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4392 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4395 tree part
= TREE_VALUE (tail
);
4396 tree part_type
= TREE_TYPE (part
);
/* Reference to the part within LHS, and a temp to hold its value.  */
4397 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
4398 rtx target
= assign_temp (part_type
, 0, 1, 1);
4399 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
4400 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
4401 parts
= tree_cons (to_be_saved
,
4402 build (RTL_EXPR
, part_type
, NULL_TREE
,
/* Actually copy the current value of the part into the temp.  */
4405 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
4410 /* Subroutine of expand_expr:
4411 record the non-copied parts (LIST) of an expr (LHS), and return a list
4412 which specifies the initial values of these parts. */
/* NOTE(review): damaged extraction -- line numbers fused into text and
   some lines missing.  Code kept byte-identical; comments only.  */
/* init_noncopied_parts: companion to save_noncopied_parts -- build a
   list pairing each non-copied part of LHS with its initial value
   (per the comment block preceding this function).  Nested TREE_LIST
   entries are flattened recursively.  */
4415 init_noncopied_parts (lhs
, list
)
4422 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
4423 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
4424 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
4427 tree part
= TREE_VALUE (tail
);
4428 tree part_type
= TREE_TYPE (part
);
/* Pair the initial value (TREE_PURPOSE) with a ref to the part.  */
4429 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
4430 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
4435 /* Subroutine of expand_expr: return nonzero iff there is no way that
4436 EXP can reference X, which is being modified. */
/* NOTE(review): damaged extraction -- line numbers fused into text,
   statements split, and many lines missing (case labels, braces,
   returns).  Code kept byte-identical; comments only.  Verify against
   a pristine expr.c.  */
/* safe_from_p: return nonzero iff there is no way that EXP can
   reference X, which is being modified (per the comment block
   preceding this function).  Works by classifying EXP, finding any
   rtx (exp_rtl) it may live in, and recursing over operands.  */
4439 safe_from_p (x
, exp
)
4447 /* If EXP has varying size, we MUST use a target since we currently
4448 have no way of allocating temporaries of variable size
4449 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4450 So we assume here that something at a higher level has prevented a
4451 clash. This is somewhat bogus, but the best we can do. Only
4452 do this when X is BLKmode. */
4453 || (TREE_TYPE (exp
) != 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4454 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
4455 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
4456 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
4457 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
4459 && GET_MODE (x
) == BLKmode
))
4462 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4463 find the underlying pseudo. */
4464 if (GET_CODE (x
) == SUBREG
)
4467 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4471 /* If X is a location in the outgoing argument area, it is always safe. */
4472 if (GET_CODE (x
) == MEM
4473 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
4474 || (GET_CODE (XEXP (x
, 0)) == PLUS
4475 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
)))
/* First dispatch on the broad class of tree code.  */
4478 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4481 exp_rtl
= DECL_RTL (exp
);
4488 if (TREE_CODE (exp
) == TREE_LIST
)
4489 return ((TREE_VALUE (exp
) == 0
4490 || safe_from_p (x
, TREE_VALUE (exp
)))
4491 && (TREE_CHAIN (exp
) == 0
4492 || safe_from_p (x
, TREE_CHAIN (exp
))));
4497 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4501 return (safe_from_p (x
, TREE_OPERAND (exp
, 0))
4502 && safe_from_p (x
, TREE_OPERAND (exp
, 1)));
4506 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4507 the expression. If it is set, we conflict iff we are that rtx or
4508 both are in memory. Otherwise, we check all operands of the
4509 expression recursively. */
4511 switch (TREE_CODE (exp
))
4514 return (staticp (TREE_OPERAND (exp
, 0))
4515 || safe_from_p (x
, TREE_OPERAND (exp
, 0)));
4518 if (GET_CODE (x
) == MEM
)
4523 exp_rtl
= CALL_EXPR_RTL (exp
);
4526 /* Assume that the call will clobber all hard registers and
4528 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
4529 || GET_CODE (x
) == MEM
)
4536 /* If a sequence exists, we would have to scan every instruction
4537 in the sequence to see if it was safe. This is probably not
4539 if (RTL_EXPR_SEQUENCE (exp
))
4542 exp_rtl
= RTL_EXPR_RTL (exp
);
4545 case WITH_CLEANUP_EXPR
:
4546 exp_rtl
= RTL_EXPR_RTL (exp
);
4549 case CLEANUP_POINT_EXPR
:
4550 return safe_from_p (x
, TREE_OPERAND (exp
, 0));
4553 exp_rtl
= SAVE_EXPR_RTL (exp
);
4557 /* The only operand we look at is operand 1. The rest aren't
4558 part of the expression. */
4559 return safe_from_p (x
, TREE_OPERAND (exp
, 1));
4561 case METHOD_CALL_EXPR
:
4562 /* This takes a rtx argument, but shouldn't appear here. */
4566 /* If we have an rtx, we do not need to scan our operands. */
/* Recursive fallback: every operand of EXP must be safe.  */
4570 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
4571 for (i
= 0; i
< nops
; i
++)
4572 if (TREE_OPERAND (exp
, i
) != 0
4573 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
)))
4577 /* If we have an rtl, find any enclosed object. Then see if we conflict
4581 if (GET_CODE (exp_rtl
) == SUBREG
)
4583 exp_rtl
= SUBREG_REG (exp_rtl
);
4584 if (GET_CODE (exp_rtl
) == REG
4585 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
4589 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4590 are memory and EXP is not readonly. */
4591 return ! (rtx_equal_p (x
, exp_rtl
)
4592 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
4593 && ! TREE_READONLY (exp
)));
4596 /* If we reach here, it is safe. */
4600 /* Subroutine of expand_expr: return nonzero iff EXP is an
4601 expression whose type is statically determinable. */
/* NOTE(review): the function header was lost in extraction; judging
   from the preceding comment ("return nonzero iff EXP is an expression
   whose type is statically determinable") this is presumably
   fixed_type_p -- confirm against a pristine expr.c.  The visible body
   tests whether EXP is one of the tree codes whose type is fixed.  */
4607 if (TREE_CODE (exp
) == PARM_DECL
4608 || TREE_CODE (exp
) == VAR_DECL
4609 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
4610 || TREE_CODE (exp
) == COMPONENT_REF
4611 || TREE_CODE (exp
) == ARRAY_REF
)
4616 /* Subroutine of expand_expr: return rtx if EXP is a
4617 variable or parameter; else return 0. */
/* NOTE(review): function header lost in extraction; per the preceding
   comment ("return rtx if EXP is a variable or parameter; else return
   0") this is presumably var_rtx -- confirm against a pristine expr.c.
   The visible body returns DECL_RTL for the matching decl codes
   (the case labels themselves were dropped by the extraction).  */
4624 switch (TREE_CODE (exp
))
4628 return DECL_RTL (exp
);
4634 /* expand_expr: generate code for computing expression EXP.
4635 An rtx for the computed value is returned. The value is never null.
4636 In the case of a void EXP, const0_rtx is returned.
4638 The value may be stored in TARGET if TARGET is nonzero.
4639 TARGET is just a suggestion; callers must assume that
4640 the rtx returned may not be the same as TARGET.
4642 If TARGET is CONST0_RTX, it means that the value will be ignored.
4644 If TMODE is not VOIDmode, it suggests generating the
4645 result in mode TMODE. But this is done only when convenient.
4646 Otherwise, TMODE is ignored and the value generated in its natural mode.
4647 TMODE is just a suggestion; callers must assume that
4648 the rtx returned may not have mode TMODE.
4650 Note that TARGET may have neither TMODE nor MODE. In that case, it
4651 probably will not be used.
4653 If MODIFIER is EXPAND_SUM then when EXP is an addition
4654 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4655 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4656 products as above, or REG or MEM, or constant.
4657 Ordinarily in such cases we would output mul or add instructions
4658 and then return a pseudo reg containing the sum.
4660 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4661 it also marks a label as absolutely required (it can't be dead).
4662 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4663 This is used for outputting expressions used in initializers.
4665 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4666 with a constant address even if that address is not normally legitimate.
4667 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4670 expand_expr (exp
, target
, tmode
, modifier
)
4673 enum machine_mode tmode
;
4674 enum expand_modifier modifier
;
4676 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4677 This is static so it will be accessible to our recursive callees. */
4678 static tree placeholder_list
= 0;
4679 register rtx op0
, op1
, temp
;
4680 tree type
= TREE_TYPE (exp
);
4681 int unsignedp
= TREE_UNSIGNED (type
);
4682 register enum machine_mode mode
= TYPE_MODE (type
);
4683 register enum tree_code code
= TREE_CODE (exp
);
4685 /* Use subtarget as the target for operand 0 of a binary operation. */
4686 rtx subtarget
= (target
!= 0 && GET_CODE (target
) == REG
? target
: 0);
4687 rtx original_target
= target
;
4688 /* Maybe defer this until sure not doing bytecode? */
4689 int ignore
= (target
== const0_rtx
4690 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
4691 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
4692 || code
== COND_EXPR
)
4693 && TREE_CODE (type
) == VOID_TYPE
));
4697 if (output_bytecode
&& modifier
!= EXPAND_INITIALIZER
)
4699 bc_expand_expr (exp
);
4703 /* Don't use hard regs as subtargets, because the combiner
4704 can only handle pseudo regs. */
4705 if (subtarget
&& REGNO (subtarget
) < FIRST_PSEUDO_REGISTER
)
4707 /* Avoid subtargets inside loops,
4708 since they hide some invariant expressions. */
4709 if (preserve_subexpressions_p ())
4712 /* If we are going to ignore this result, we need only do something
4713 if there is a side-effect somewhere in the expression. If there
4714 is, short-circuit the most common cases here. Note that we must
4715 not call expand_expr with anything but const0_rtx in case this
4716 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4720 if (! TREE_SIDE_EFFECTS (exp
))
4723 /* Ensure we reference a volatile object even if value is ignored. */
4724 if (TREE_THIS_VOLATILE (exp
)
4725 && TREE_CODE (exp
) != FUNCTION_DECL
4726 && mode
!= VOIDmode
&& mode
!= BLKmode
)
4728 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
4729 if (GET_CODE (temp
) == MEM
)
4730 temp
= copy_to_reg (temp
);
4734 if (TREE_CODE_CLASS (code
) == '1')
4735 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4736 VOIDmode
, modifier
);
4737 else if (TREE_CODE_CLASS (code
) == '2'
4738 || TREE_CODE_CLASS (code
) == '<')
4740 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
4741 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
4744 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
4745 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
4746 /* If the second operand has no side effects, just evaluate
4748 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
4749 VOIDmode
, modifier
);
4754 /* If will do cse, generate all results into pseudo registers
4755 since 1) that allows cse to find more things
4756 and 2) otherwise cse could produce an insn the machine
4759 if (! cse_not_expected
&& mode
!= BLKmode
&& target
4760 && (GET_CODE (target
) != REG
|| REGNO (target
) < FIRST_PSEUDO_REGISTER
))
4767 tree function
= decl_function_context (exp
);
4768 /* Handle using a label in a containing function. */
4769 if (function
!= current_function_decl
&& function
!= 0)
4771 struct function
*p
= find_function_data (function
);
4772 /* Allocate in the memory associated with the function
4773 that the label is in. */
4774 push_obstacks (p
->function_obstack
,
4775 p
->function_maybepermanent_obstack
);
4777 p
->forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4778 label_rtx (exp
), p
->forced_labels
);
4781 else if (modifier
== EXPAND_INITIALIZER
)
4782 forced_labels
= gen_rtx (EXPR_LIST
, VOIDmode
,
4783 label_rtx (exp
), forced_labels
);
4784 temp
= gen_rtx (MEM
, FUNCTION_MODE
,
4785 gen_rtx (LABEL_REF
, Pmode
, label_rtx (exp
)));
4786 if (function
!= current_function_decl
&& function
!= 0)
4787 LABEL_REF_NONLOCAL_P (XEXP (temp
, 0)) = 1;
4792 if (DECL_RTL (exp
) == 0)
4794 error_with_decl (exp
, "prior parameter's size depends on `%s'");
4795 return CONST0_RTX (mode
);
4798 /* ... fall through ... */
4801 /* If a static var's type was incomplete when the decl was written,
4802 but the type is complete now, lay out the decl now. */
4803 if (DECL_SIZE (exp
) == 0 && TYPE_SIZE (TREE_TYPE (exp
)) != 0
4804 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
4806 push_obstacks_nochange ();
4807 end_temporary_allocation ();
4808 layout_decl (exp
, 0);
4809 PUT_MODE (DECL_RTL (exp
), DECL_MODE (exp
));
4813 /* ... fall through ... */
4817 if (DECL_RTL (exp
) == 0)
4820 /* Ensure variable marked as used even if it doesn't go through
4821 a parser. If it hasn't been used yet, write out an external
4823 if (! TREE_USED (exp
))
4825 assemble_external (exp
);
4826 TREE_USED (exp
) = 1;
4829 /* Show we haven't gotten RTL for this yet. */
4832 /* Handle variables inherited from containing functions. */
4833 context
= decl_function_context (exp
);
4835 /* We treat inline_function_decl as an alias for the current function
4836 because that is the inline function whose vars, types, etc.
4837 are being merged into the current function.
4838 See expand_inline_function. */
4840 if (context
!= 0 && context
!= current_function_decl
4841 && context
!= inline_function_decl
4842 /* If var is static, we don't need a static chain to access it. */
4843 && ! (GET_CODE (DECL_RTL (exp
)) == MEM
4844 && CONSTANT_P (XEXP (DECL_RTL (exp
), 0))))
4848 /* Mark as non-local and addressable. */
4849 DECL_NONLOCAL (exp
) = 1;
4850 if (DECL_NO_STATIC_CHAIN (current_function_decl
))
4852 mark_addressable (exp
);
4853 if (GET_CODE (DECL_RTL (exp
)) != MEM
)
4855 addr
= XEXP (DECL_RTL (exp
), 0);
4856 if (GET_CODE (addr
) == MEM
)
4857 addr
= gen_rtx (MEM
, Pmode
,
4858 fix_lexical_addr (XEXP (addr
, 0), exp
));
4860 addr
= fix_lexical_addr (addr
, exp
);
4861 temp
= change_address (DECL_RTL (exp
), mode
, addr
);
4864 /* This is the case of an array whose size is to be determined
4865 from its initializer, while the initializer is still being parsed.
4868 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4869 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) == REG
)
4870 temp
= change_address (DECL_RTL (exp
), GET_MODE (DECL_RTL (exp
)),
4871 XEXP (DECL_RTL (exp
), 0));
4873 /* If DECL_RTL is memory, we are in the normal case and either
4874 the address is not valid or it is not a register and -fforce-addr
4875 is specified, get the address into a register. */
4877 else if (GET_CODE (DECL_RTL (exp
)) == MEM
4878 && modifier
!= EXPAND_CONST_ADDRESS
4879 && modifier
!= EXPAND_SUM
4880 && modifier
!= EXPAND_INITIALIZER
4881 && (! memory_address_p (DECL_MODE (exp
),
4882 XEXP (DECL_RTL (exp
), 0))
4884 && GET_CODE (XEXP (DECL_RTL (exp
), 0)) != REG
)))
4885 temp
= change_address (DECL_RTL (exp
), VOIDmode
,
4886 copy_rtx (XEXP (DECL_RTL (exp
), 0)));
4888 /* If we got something, return it. But first, set the alignment
4889 if the address is a register. */
4892 if (GET_CODE (temp
) == MEM
&& GET_CODE (XEXP (temp
, 0)) == REG
)
4893 mark_reg_pointer (XEXP (temp
, 0),
4894 DECL_ALIGN (exp
) / BITS_PER_UNIT
);
4899 /* If the mode of DECL_RTL does not match that of the decl, it
4900 must be a promoted value. We return a SUBREG of the wanted mode,
4901 but mark it so that we know that it was already extended. */
4903 if (GET_CODE (DECL_RTL (exp
)) == REG
4904 && GET_MODE (DECL_RTL (exp
)) != mode
)
4906 /* Get the signedness used for this variable. Ensure we get the
4907 same mode we got when the variable was declared. */
4908 if (GET_MODE (DECL_RTL (exp
))
4909 != promote_mode (type
, DECL_MODE (exp
), &unsignedp
, 0))
4912 temp
= gen_rtx (SUBREG
, mode
, DECL_RTL (exp
), 0);
4913 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4914 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
4918 return DECL_RTL (exp
);
4921 return immed_double_const (TREE_INT_CST_LOW (exp
),
4922 TREE_INT_CST_HIGH (exp
),
4926 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, 0);
4929 /* If optimized, generate immediate CONST_DOUBLE
4930 which will be turned into memory by reload if necessary.
4932 We used to force a register so that loop.c could see it. But
4933 this does not allow gen_* patterns to perform optimizations with
4934 the constants. It also produces two insns in cases like "x = 1.0;".
4935 On most machines, floating-point constants are not permitted in
4936 many insns, so we'd end up copying it to a register in any case.
4938 Now, we do the copying in expand_binop, if appropriate. */
4939 return immed_real_const (exp
);
4943 if (! TREE_CST_RTL (exp
))
4944 output_constant_def (exp
);
4946 /* TREE_CST_RTL probably contains a constant address.
4947 On RISC machines where a constant address isn't valid,
4948 make some insns to get that address into a register. */
4949 if (GET_CODE (TREE_CST_RTL (exp
)) == MEM
4950 && modifier
!= EXPAND_CONST_ADDRESS
4951 && modifier
!= EXPAND_INITIALIZER
4952 && modifier
!= EXPAND_SUM
4953 && (! memory_address_p (mode
, XEXP (TREE_CST_RTL (exp
), 0))
4955 && GET_CODE (XEXP (TREE_CST_RTL (exp
), 0)) != REG
)))
4956 return change_address (TREE_CST_RTL (exp
), VOIDmode
,
4957 copy_rtx (XEXP (TREE_CST_RTL (exp
), 0)));
4958 return TREE_CST_RTL (exp
);
4961 context
= decl_function_context (exp
);
4963 /* We treat inline_function_decl as an alias for the current function
4964 because that is the inline function whose vars, types, etc.
4965 are being merged into the current function.
4966 See expand_inline_function. */
4967 if (context
== current_function_decl
|| context
== inline_function_decl
)
4970 /* If this is non-local, handle it. */
4973 temp
= SAVE_EXPR_RTL (exp
);
4974 if (temp
&& GET_CODE (temp
) == REG
)
4976 put_var_into_stack (exp
);
4977 temp
= SAVE_EXPR_RTL (exp
);
4979 if (temp
== 0 || GET_CODE (temp
) != MEM
)
4981 return change_address (temp
, mode
,
4982 fix_lexical_addr (XEXP (temp
, 0), exp
));
4984 if (SAVE_EXPR_RTL (exp
) == 0)
4986 if (mode
== VOIDmode
)
4989 temp
= assign_temp (type
, 0, 0, 0);
4991 SAVE_EXPR_RTL (exp
) = temp
;
4992 if (!optimize
&& GET_CODE (temp
) == REG
)
4993 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, temp
,
4996 /* If the mode of TEMP does not match that of the expression, it
4997 must be a promoted value. We pass store_expr a SUBREG of the
4998 wanted mode but mark it so that we know that it was already
4999 extended. Note that `unsignedp' was modified above in
5002 if (GET_CODE (temp
) == REG
&& GET_MODE (temp
) != mode
)
5004 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5005 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5006 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5009 if (temp
== const0_rtx
)
5010 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
5012 store_expr (TREE_OPERAND (exp
, 0), temp
, 0);
5015 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5016 must be a promoted value. We return a SUBREG of the wanted mode,
5017 but mark it so that we know that it was already extended. */
5019 if (GET_CODE (SAVE_EXPR_RTL (exp
)) == REG
5020 && GET_MODE (SAVE_EXPR_RTL (exp
)) != mode
)
5022 /* Compute the signedness and make the proper SUBREG. */
5023 promote_mode (type
, mode
, &unsignedp
, 0);
5024 temp
= gen_rtx (SUBREG
, mode
, SAVE_EXPR_RTL (exp
), 0);
5025 SUBREG_PROMOTED_VAR_P (temp
) = 1;
5026 SUBREG_PROMOTED_UNSIGNED_P (temp
) = unsignedp
;
5030 return SAVE_EXPR_RTL (exp
);
5035 temp
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5036 TREE_OPERAND (exp
, 0) = unsave_expr_now (TREE_OPERAND (exp
, 0));
5040 case PLACEHOLDER_EXPR
:
5041 /* If there is an object on the head of the placeholder list,
5042 see if some object in its references is of type TYPE. For
5043 further information, see tree.def. */
5044 if (placeholder_list
)
5046 tree need_type
= TYPE_MAIN_VARIANT (type
);
5048 tree old_list
= placeholder_list
;
5051 /* See if the object is the type that we want and does not contain
5052 this PLACEHOLDER_EXPR itself. Then see if the operand of any
5053 reference is the type we want. */
5054 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list
)))
5056 && (! contains_this_placeholder_p
5057 (TREE_PURPOSE (placeholder_list
), exp
)))
5058 object
= TREE_PURPOSE (placeholder_list
);
5061 /* Find the outermost reference that is of the type we want and
5062 that does not itself contain this PLACEHOLDER_EXPR. */
5063 for (elt
= TREE_PURPOSE (placeholder_list
);
5065 && (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
5066 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '1'
5067 || TREE_CODE_CLASS (TREE_CODE (elt
)) == '2'
5068 || TREE_CODE_CLASS (TREE_CODE (elt
)) == 'e');
5069 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
5070 || TREE_CODE (elt
) == COND_EXPR
)
5071 ? TREE_OPERAND (elt
, 1) : TREE_OPERAND (elt
, 0)))
5072 if (TREE_CODE_CLASS (TREE_CODE (elt
)) == 'r'
5073 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt
, 0)))
5075 && ! contains_this_placeholder_p (TREE_OPERAND (elt
, 0),
5078 object
= TREE_OPERAND (elt
, 0);
5084 /* Expand this object skipping the list entries before
5085 it was found in case it is also a PLACEHOLDER_EXPR.
5086 In that case, we want to translate it using subsequent
5088 placeholder_list
= TREE_CHAIN (placeholder_list
);
5089 temp
= expand_expr (object
, original_target
, tmode
, modifier
);
5090 placeholder_list
= old_list
;
5095 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5098 case WITH_RECORD_EXPR
:
5099 /* Put the object on the placeholder list, expand our first operand,
5100 and pop the list. */
5101 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
5103 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
5105 placeholder_list
= TREE_CHAIN (placeholder_list
);
5109 expand_exit_loop_if_false (NULL_PTR
,
5110 invert_truthvalue (TREE_OPERAND (exp
, 0)));
5115 expand_start_loop (1);
5116 expand_expr_stmt (TREE_OPERAND (exp
, 0));
5124 tree vars
= TREE_OPERAND (exp
, 0);
5125 int vars_need_expansion
= 0;
5127 /* Need to open a binding contour here because
5128 if there are any cleanups they must be contained here. */
5129 expand_start_bindings (0);
5131 /* Mark the corresponding BLOCK for output in its proper place. */
5132 if (TREE_OPERAND (exp
, 2) != 0
5133 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
5134 insert_block (TREE_OPERAND (exp
, 2));
5136 /* If VARS have not yet been expanded, expand them now. */
5139 if (DECL_RTL (vars
) == 0)
5141 vars_need_expansion
= 1;
5144 expand_decl_init (vars
);
5145 vars
= TREE_CHAIN (vars
);
5148 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, modifier
);
5150 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
5156 if (RTL_EXPR_SEQUENCE (exp
))
5158 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
5160 emit_insns (RTL_EXPR_SEQUENCE (exp
));
5161 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
5163 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
5164 free_temps_for_rtl_expr (exp
);
5165 return RTL_EXPR_RTL (exp
);
5168 /* If we don't need the result, just ensure we evaluate any
5173 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
5174 expand_expr (TREE_VALUE (elt
), const0_rtx
, VOIDmode
, 0);
5178 /* All elts simple constants => refer to a constant in memory. But
5179 if this is a non-BLKmode mode, let it store a field at a time
5180 since that should make a CONST_INT or CONST_DOUBLE when we
5181 fold. Likewise, if we have a target we can use, it is best to
5182 store directly into the target unless the type is large enough
5183 that memcpy will be used. If we are making an initializer and
5184 all operands are constant, put it in memory as well. */
5185 else if ((TREE_STATIC (exp
)
5186 && ((mode
== BLKmode
5187 && ! (target
!= 0 && safe_from_p (target
, exp
)))
5188 || TREE_ADDRESSABLE (exp
)
5189 || (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5190 && (move_by_pieces_ninsns
5191 (TREE_INT_CST_LOW (TYPE_SIZE (type
))/BITS_PER_UNIT
,
5192 TYPE_ALIGN (type
) / BITS_PER_UNIT
)
5194 && ! mostly_zeros_p (exp
))))
5195 || (modifier
== EXPAND_INITIALIZER
&& TREE_CONSTANT (exp
)))
5197 rtx constructor
= output_constant_def (exp
);
5198 if (modifier
!= EXPAND_CONST_ADDRESS
5199 && modifier
!= EXPAND_INITIALIZER
5200 && modifier
!= EXPAND_SUM
5201 && (! memory_address_p (GET_MODE (constructor
),
5202 XEXP (constructor
, 0))
5204 && GET_CODE (XEXP (constructor
, 0)) != REG
)))
5205 constructor
= change_address (constructor
, VOIDmode
,
5206 XEXP (constructor
, 0));
5212 /* Handle calls that pass values in multiple non-contiguous
5213 locations. The Irix 6 ABI has examples of this. */
5214 if (target
== 0 || ! safe_from_p (target
, exp
)
5215 || GET_CODE (target
) == PARALLEL
)
5217 if (mode
!= BLKmode
&& ! TREE_ADDRESSABLE (exp
))
5218 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5220 target
= assign_temp (type
, 0, 1, 1);
5223 if (TREE_READONLY (exp
))
5225 if (GET_CODE (target
) == MEM
)
5226 target
= copy_rtx (target
);
5228 RTX_UNCHANGING_P (target
) = 1;
5231 store_constructor (exp
, target
, 0);
5237 tree exp1
= TREE_OPERAND (exp
, 0);
5240 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
5241 op0
= memory_address (mode
, op0
);
5243 temp
= gen_rtx (MEM
, mode
, op0
);
5244 /* If address was computed by addition,
5245 mark this as an element of an aggregate. */
5246 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5247 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == SAVE_EXPR
5248 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) == PLUS_EXPR
)
5249 || AGGREGATE_TYPE_P (TREE_TYPE (exp
))
5250 || (TREE_CODE (exp1
) == ADDR_EXPR
5251 && (exp2
= TREE_OPERAND (exp1
, 0))
5252 && AGGREGATE_TYPE_P (TREE_TYPE (exp2
))))
5253 MEM_IN_STRUCT_P (temp
) = 1;
5254 MEM_VOLATILE_P (temp
) = TREE_THIS_VOLATILE (exp
) | flag_volatile
;
5256 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5257 here, because, in C and C++, the fact that a location is accessed
5258 through a pointer to const does not mean that the value there can
5259 never change. Languages where it can never change should
5260 also set TREE_STATIC. */
5261 RTX_UNCHANGING_P (temp
) = TREE_READONLY (exp
) & TREE_STATIC (exp
);
5266 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
5270 tree array
= TREE_OPERAND (exp
, 0);
5271 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
5272 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
5273 tree index
= TREE_OPERAND (exp
, 1);
5274 tree index_type
= TREE_TYPE (index
);
5277 if (TREE_CODE (low_bound
) != INTEGER_CST
5278 && contains_placeholder_p (low_bound
))
5279 low_bound
= build (WITH_RECORD_EXPR
, sizetype
, low_bound
, exp
);
5281 /* Optimize the special-case of a zero lower bound.
5283 We convert the low_bound to sizetype to avoid some problems
5284 with constant folding. (E.g. suppose the lower bound is 1,
5285 and its mode is QI. Without the conversion, (ARRAY
5286 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5287 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5289 But sizetype isn't quite right either (especially if
5290 the lowbound is negative). FIXME */
5292 if (! integer_zerop (low_bound
))
5293 index
= fold (build (MINUS_EXPR
, index_type
, index
,
5294 convert (sizetype
, low_bound
)));
5296 if ((TREE_CODE (index
) != INTEGER_CST
5297 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5298 && (! SLOW_UNALIGNED_ACCESS
|| ! get_inner_unaligned_p (exp
)))
5300 /* Nonconstant array index or nonconstant element size, and
5301 not an array in an unaligned (packed) structure field.
5302 Generate the tree for *(&array+index) and expand that,
5303 except do it in a language-independent way
5304 and don't complain about non-lvalue arrays.
5305 `mark_addressable' should already have been called
5306 for any array for which this case will be reached. */
5308 /* Don't forget the const or volatile flag from the array
5310 tree variant_type
= build_type_variant (type
,
5311 TREE_READONLY (exp
),
5312 TREE_THIS_VOLATILE (exp
));
5313 tree array_adr
= build1 (ADDR_EXPR
,
5314 build_pointer_type (variant_type
), array
);
5316 tree size
= size_in_bytes (type
);
5318 /* Convert the integer argument to a type the same size as sizetype
5319 so the multiply won't overflow spuriously. */
5320 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
5321 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
5324 if (TREE_CODE (size
) != INTEGER_CST
5325 && contains_placeholder_p (size
))
5326 size
= build (WITH_RECORD_EXPR
, sizetype
, size
, exp
);
5328 /* Don't think the address has side effects
5329 just because the array does.
5330 (In some cases the address might have side effects,
5331 and we fail to record that fact here. However, it should not
5332 matter, since expand_expr should not care.) */
5333 TREE_SIDE_EFFECTS (array_adr
) = 0;
5337 (INDIRECT_REF
, type
,
5338 fold (build (PLUS_EXPR
,
5339 TYPE_POINTER_TO (variant_type
),
5344 TYPE_POINTER_TO (variant_type
),
5345 fold (build (MULT_EXPR
, TREE_TYPE (index
),
5347 convert (TREE_TYPE (index
),
5350 /* Volatility, etc., of new expression is same as old
5352 TREE_SIDE_EFFECTS (elt
) = TREE_SIDE_EFFECTS (exp
);
5353 TREE_THIS_VOLATILE (elt
) = TREE_THIS_VOLATILE (exp
);
5354 TREE_READONLY (elt
) = TREE_READONLY (exp
);
5356 return expand_expr (elt
, target
, tmode
, modifier
);
5359 /* Fold an expression like: "foo"[2].
5360 This is not done in fold so it won't happen inside &.
5361 Don't fold if this is for wide characters since it's too
5362 difficult to do correctly and this is a very rare case. */
5364 if (TREE_CODE (array
) == STRING_CST
5365 && TREE_CODE (index
) == INTEGER_CST
5366 && !TREE_INT_CST_HIGH (index
)
5367 && (i
= TREE_INT_CST_LOW (index
)) < TREE_STRING_LENGTH (array
)
5368 && GET_MODE_CLASS (mode
) == MODE_INT
5369 && GET_MODE_SIZE (mode
) == 1)
5370 return GEN_INT (TREE_STRING_POINTER (array
)[i
]);
5372 /* If this is a constant index into a constant array,
5373 just get the value from the array. Handle both the cases when
5374 we have an explicit constructor and when our operand is a variable
5375 that was declared const. */
5377 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
))
5379 if (TREE_CODE (index
) == INTEGER_CST
5380 && TREE_INT_CST_HIGH (index
) == 0)
5382 tree elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0));
5384 i
= TREE_INT_CST_LOW (index
);
5386 elem
= TREE_CHAIN (elem
);
5388 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5393 else if (optimize
>= 1
5394 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
5395 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
5396 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
5398 if (TREE_CODE (index
) == INTEGER_CST
5399 && TREE_INT_CST_HIGH (index
) == 0)
5401 tree init
= DECL_INITIAL (array
);
5403 i
= TREE_INT_CST_LOW (index
);
5404 if (TREE_CODE (init
) == CONSTRUCTOR
)
5406 tree elem
= CONSTRUCTOR_ELTS (init
);
5409 && !tree_int_cst_equal (TREE_PURPOSE (elem
), index
))
5410 elem
= TREE_CHAIN (elem
);
5412 return expand_expr (fold (TREE_VALUE (elem
)), target
,
5415 else if (TREE_CODE (init
) == STRING_CST
5416 && i
< TREE_STRING_LENGTH (init
))
5417 return GEN_INT (TREE_STRING_POINTER (init
)[i
]);
5422 /* Treat array-ref with constant index as a component-ref. */
5426 /* If the operand is a CONSTRUCTOR, we can just extract the
5427 appropriate field if it is present. Don't do this if we have
5428 already written the data since we want to refer to that copy
5429 and varasm.c assumes that's what we'll do. */
5430 if (code
!= ARRAY_REF
5431 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
5432 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
5436 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
5437 elt
= TREE_CHAIN (elt
))
5438 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1)
5439 /* We can normally use the value of the field in the
5440 CONSTRUCTOR. However, if this is a bitfield in
5441 an integral mode that we can fit in a HOST_WIDE_INT,
5442 we must mask only the number of bits in the bitfield,
5443 since this is done implicitly by the constructor. If
5444 the bitfield does not meet either of those conditions,
5445 we can't do this optimization. */
5446 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt
))
5447 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt
)))
5449 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt
)))
5450 <= HOST_BITS_PER_WIDE_INT
))))
5452 op0
= expand_expr (TREE_VALUE (elt
), target
, tmode
, modifier
);
5453 if (DECL_BIT_FIELD (TREE_PURPOSE (elt
)))
5455 int bitsize
= DECL_FIELD_SIZE (TREE_PURPOSE (elt
));
5456 enum machine_mode imode
5457 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt
)));
5459 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt
))))
5461 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
5462 op0
= expand_and (op0
, op1
, target
);
5467 = build_int_2 (imode
- bitsize
, 0);
5469 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
5471 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
5481 enum machine_mode mode1
;
5487 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
5488 &mode1
, &unsignedp
, &volatilep
,
5491 /* If we got back the original object, something is wrong. Perhaps
5492 we are evaluating an expression too early. In any event, don't
5493 infinitely recurse. */
5497 /* If TEM's type is a union of variable size, pass TARGET to the inner
5498 computation, since it will need a temporary and TARGET is known
5499 to have to do. This occurs in unchecked conversion in Ada. */
5501 op0
= expand_expr (tem
,
5502 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
5503 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
5505 ? target
: NULL_RTX
),
5507 modifier
== EXPAND_INITIALIZER
? modifier
: 0);
5509 /* If this is a constant, put it into a register if it is a
5510 legitimate constant and memory if it isn't. */
5511 if (CONSTANT_P (op0
))
5513 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (tem
));
5514 if (mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
))
5515 op0
= force_reg (mode
, op0
);
5517 op0
= validize_mem (force_const_mem (mode
, op0
));
5522 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
5524 if (GET_CODE (op0
) != MEM
)
5526 op0
= change_address (op0
, VOIDmode
,
5527 gen_rtx (PLUS
, ptr_mode
, XEXP (op0
, 0),
5528 force_reg (ptr_mode
, offset_rtx
)));
5531 /* Don't forget about volatility even if this is a bitfield. */
5532 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
5534 op0
= copy_rtx (op0
);
5535 MEM_VOLATILE_P (op0
) = 1;
5538 /* In cases where an aligned union has an unaligned object
5539 as a field, we might be extracting a BLKmode value from
5540 an integer-mode (e.g., SImode) object. Handle this case
5541 by doing the extract into an object as wide as the field
5542 (which we know to be the width of a basic mode), then
5543 storing into memory, and changing the mode to BLKmode.
5544 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5545 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5546 if (mode1
== VOIDmode
5547 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
5548 || (modifier
!= EXPAND_CONST_ADDRESS
5549 && modifier
!= EXPAND_INITIALIZER
5550 && ((mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
5551 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5552 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5553 /* If the field isn't aligned enough to fetch as a memref,
5554 fetch it as a bit field. */
5555 || (SLOW_UNALIGNED_ACCESS
5556 && ((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
))
5557 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))))))
5559 enum machine_mode ext_mode
= mode
;
5561 if (ext_mode
== BLKmode
)
5562 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
5564 if (ext_mode
== BLKmode
)
5566 /* In this case, BITPOS must start at a byte boundary and
5567 TARGET, if specified, must be a MEM. */
5568 if (GET_CODE (op0
) != MEM
5569 || (target
!= 0 && GET_CODE (target
) != MEM
)
5570 || bitpos
% BITS_PER_UNIT
!= 0)
5573 op0
= change_address (op0
, VOIDmode
,
5574 plus_constant (XEXP (op0
, 0),
5575 bitpos
/ BITS_PER_UNIT
));
5577 target
= assign_temp (type
, 0, 1, 1);
5579 emit_block_move (target
, op0
,
5580 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5587 op0
= validize_mem (op0
);
5589 if (GET_CODE (op0
) == MEM
&& GET_CODE (XEXP (op0
, 0)) == REG
)
5590 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5592 op0
= extract_bit_field (op0
, bitsize
, bitpos
,
5593 unsignedp
, target
, ext_mode
, ext_mode
,
5595 int_size_in_bytes (TREE_TYPE (tem
)));
5597 /* If the result is a record type and BITSIZE is narrower than
5598 the mode of OP0, an integral mode, and this is a big endian
5599 machine, we must put the field into the high-order bits. */
5600 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
5601 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
5602 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
5603 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
5604 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
5608 if (mode
== BLKmode
)
5610 rtx
new = assign_stack_temp (ext_mode
,
5611 bitsize
/ BITS_PER_UNIT
, 0);
5613 emit_move_insn (new, op0
);
5614 op0
= copy_rtx (new);
5615 PUT_MODE (op0
, BLKmode
);
5616 MEM_IN_STRUCT_P (op0
) = 1;
5622 /* If the result is BLKmode, use that to access the object
5624 if (mode
== BLKmode
)
5627 /* Get a reference to just this component. */
5628 if (modifier
== EXPAND_CONST_ADDRESS
5629 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
5630 op0
= gen_rtx (MEM
, mode1
, plus_constant (XEXP (op0
, 0),
5631 (bitpos
/ BITS_PER_UNIT
)));
5633 op0
= change_address (op0
, mode1
,
5634 plus_constant (XEXP (op0
, 0),
5635 (bitpos
/ BITS_PER_UNIT
)));
5636 if (GET_CODE (XEXP (op0
, 0)) == REG
)
5637 mark_reg_pointer (XEXP (op0
, 0), alignment
);
5639 MEM_IN_STRUCT_P (op0
) = 1;
5640 MEM_VOLATILE_P (op0
) |= volatilep
;
5641 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
5642 || modifier
== EXPAND_CONST_ADDRESS
5643 || modifier
== EXPAND_INITIALIZER
)
5645 else if (target
== 0)
5646 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5648 convert_move (target
, op0
, unsignedp
);
5652 /* Intended for a reference to a buffer of a file-object in Pascal.
5653 But it's not certain that a special tree code will really be
5654 necessary for these. INDIRECT_REF might work for them. */
5660 /* Pascal set IN expression.
5663 rlo = set_low - (set_low%bits_per_word);
5664 the_word = set [ (index - rlo)/bits_per_word ];
5665 bit_index = index % bits_per_word;
5666 bitmask = 1 << bit_index;
5667 return !!(the_word & bitmask); */
5669 tree set
= TREE_OPERAND (exp
, 0);
5670 tree index
= TREE_OPERAND (exp
, 1);
5671 int iunsignedp
= TREE_UNSIGNED (TREE_TYPE (index
));
5672 tree set_type
= TREE_TYPE (set
);
5673 tree set_low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (set_type
));
5674 tree set_high_bound
= TYPE_MAX_VALUE (TYPE_DOMAIN (set_type
));
5675 rtx index_val
= expand_expr (index
, 0, VOIDmode
, 0);
5676 rtx lo_r
= expand_expr (set_low_bound
, 0, VOIDmode
, 0);
5677 rtx hi_r
= expand_expr (set_high_bound
, 0, VOIDmode
, 0);
5678 rtx setval
= expand_expr (set
, 0, VOIDmode
, 0);
5679 rtx setaddr
= XEXP (setval
, 0);
5680 enum machine_mode index_mode
= TYPE_MODE (TREE_TYPE (index
));
5682 rtx diff
, quo
, rem
, addr
, bit
, result
;
5684 preexpand_calls (exp
);
5686 /* If domain is empty, answer is no. Likewise if index is constant
5687 and out of bounds. */
5688 if ((TREE_CODE (set_high_bound
) == INTEGER_CST
5689 && TREE_CODE (set_low_bound
) == INTEGER_CST
5690 && tree_int_cst_lt (set_high_bound
, set_low_bound
)
5691 || (TREE_CODE (index
) == INTEGER_CST
5692 && TREE_CODE (set_low_bound
) == INTEGER_CST
5693 && tree_int_cst_lt (index
, set_low_bound
))
5694 || (TREE_CODE (set_high_bound
) == INTEGER_CST
5695 && TREE_CODE (index
) == INTEGER_CST
5696 && tree_int_cst_lt (set_high_bound
, index
))))
5700 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5702 /* If we get here, we have to generate the code for both cases
5703 (in range and out of range). */
5705 op0
= gen_label_rtx ();
5706 op1
= gen_label_rtx ();
5708 if (! (GET_CODE (index_val
) == CONST_INT
5709 && GET_CODE (lo_r
) == CONST_INT
))
5711 emit_cmp_insn (index_val
, lo_r
, LT
, NULL_RTX
,
5712 GET_MODE (index_val
), iunsignedp
, 0);
5713 emit_jump_insn (gen_blt (op1
));
5716 if (! (GET_CODE (index_val
) == CONST_INT
5717 && GET_CODE (hi_r
) == CONST_INT
))
5719 emit_cmp_insn (index_val
, hi_r
, GT
, NULL_RTX
,
5720 GET_MODE (index_val
), iunsignedp
, 0);
5721 emit_jump_insn (gen_bgt (op1
));
5724 /* Calculate the element number of bit zero in the first word
5726 if (GET_CODE (lo_r
) == CONST_INT
)
5727 rlow
= GEN_INT (INTVAL (lo_r
)
5728 & ~ ((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
));
5730 rlow
= expand_binop (index_mode
, and_optab
, lo_r
,
5731 GEN_INT (~((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
)),
5732 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5734 diff
= expand_binop (index_mode
, sub_optab
, index_val
, rlow
,
5735 NULL_RTX
, iunsignedp
, OPTAB_LIB_WIDEN
);
5737 quo
= expand_divmod (0, TRUNC_DIV_EXPR
, index_mode
, diff
,
5738 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5739 rem
= expand_divmod (1, TRUNC_MOD_EXPR
, index_mode
, index_val
,
5740 GEN_INT (BITS_PER_UNIT
), NULL_RTX
, iunsignedp
);
5742 addr
= memory_address (byte_mode
,
5743 expand_binop (index_mode
, add_optab
, diff
,
5744 setaddr
, NULL_RTX
, iunsignedp
,
5747 /* Extract the bit we want to examine */
5748 bit
= expand_shift (RSHIFT_EXPR
, byte_mode
,
5749 gen_rtx (MEM
, byte_mode
, addr
),
5750 make_tree (TREE_TYPE (index
), rem
),
5752 result
= expand_binop (byte_mode
, and_optab
, bit
, const1_rtx
,
5753 GET_MODE (target
) == byte_mode
? target
: 0,
5754 1, OPTAB_LIB_WIDEN
);
5756 if (result
!= target
)
5757 convert_move (target
, result
, 1);
5759 /* Output the code to handle the out-of-range case. */
5762 emit_move_insn (target
, const0_rtx
);
5767 case WITH_CLEANUP_EXPR
:
5768 if (RTL_EXPR_RTL (exp
) == 0)
5771 = expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5772 expand_decl_cleanup (NULL_TREE
, TREE_OPERAND (exp
, 2));
5774 /* That's it for this cleanup. */
5775 TREE_OPERAND (exp
, 2) = 0;
5777 return RTL_EXPR_RTL (exp
);
5779 case CLEANUP_POINT_EXPR
:
5781 extern int temp_slot_level
;
5782 /* Start a new binding layer that will keep track of all cleanup
5783 actions to be performed. */
5784 expand_start_bindings (0);
5786 target_temp_slot_level
= temp_slot_level
;
5788 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
5789 /* If we're going to use this value, load it up now. */
5791 op0
= force_not_mem (op0
);
5792 preserve_temp_slots (op0
);
5793 expand_end_bindings (NULL_TREE
, 0, 0);
5798 /* Check for a built-in function. */
5799 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5800 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5802 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
5803 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
5805 /* If this call was expanded already by preexpand_calls,
5806 just return the result we got. */
5807 if (CALL_EXPR_RTL (exp
) != 0)
5808 return CALL_EXPR_RTL (exp
);
5810 return expand_call (exp
, target
, ignore
);
5812 case NON_LVALUE_EXPR
:
5815 case REFERENCE_EXPR
:
5816 if (TREE_CODE (type
) == UNION_TYPE
)
5818 tree valtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
5821 if (mode
!= BLKmode
)
5822 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
5824 target
= assign_temp (type
, 0, 1, 1);
5827 if (GET_CODE (target
) == MEM
)
5828 /* Store data into beginning of memory target. */
5829 store_expr (TREE_OPERAND (exp
, 0),
5830 change_address (target
, TYPE_MODE (valtype
), 0), 0);
5832 else if (GET_CODE (target
) == REG
)
5833 /* Store this field into a union of the proper type. */
5834 store_field (target
, GET_MODE_BITSIZE (TYPE_MODE (valtype
)), 0,
5835 TYPE_MODE (valtype
), TREE_OPERAND (exp
, 0),
5837 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5841 /* Return the entire union. */
5845 if (mode
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5847 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
,
5850 /* If the signedness of the conversion differs and OP0 is
5851 a promoted SUBREG, clear that indication since we now
5852 have to do the proper extension. */
5853 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))) != unsignedp
5854 && GET_CODE (op0
) == SUBREG
)
5855 SUBREG_PROMOTED_VAR_P (op0
) = 0;
5860 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, mode
, 0);
5861 if (GET_MODE (op0
) == mode
)
5864 /* If OP0 is a constant, just convert it into the proper mode. */
5865 if (CONSTANT_P (op0
))
5867 convert_modes (mode
, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
5868 op0
, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5870 if (modifier
== EXPAND_INITIALIZER
)
5871 return gen_rtx (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
5875 convert_to_mode (mode
, op0
,
5876 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5878 convert_move (target
, op0
,
5879 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
5883 /* We come here from MINUS_EXPR when the second operand is a
5886 this_optab
= add_optab
;
5888 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5889 something else, make sure we add the register to the constant and
5890 then to the other thing. This case can occur during strength
5891 reduction and doing it this way will produce better code if the
5892 frame pointer or argument pointer is eliminated.
5894 fold-const.c will ensure that the constant is always in the inner
5895 PLUS_EXPR, so the only case we need to do anything about is if
5896 sp, ap, or fp is our second argument, in which case we must swap
5897 the innermost first argument and our second argument. */
5899 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
5900 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
5901 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
5902 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
5903 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
5904 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
5906 tree t
= TREE_OPERAND (exp
, 1);
5908 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5909 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
5912 /* If the result is to be ptr_mode and we are adding an integer to
5913 something, we might be forming a constant. So try to use
5914 plus_constant. If it produces a sum and we can't accept it,
5915 use force_operand. This allows P = &ARR[const] to generate
5916 efficient code on machines where a SYMBOL_REF is not a valid
5919 If this is an EXPAND_SUM call, always return the sum. */
5920 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
5921 || mode
== ptr_mode
)
5923 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
5924 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
5925 && TREE_CONSTANT (TREE_OPERAND (exp
, 1)))
5927 op1
= expand_expr (TREE_OPERAND (exp
, 1), subtarget
, VOIDmode
,
5929 op1
= plus_constant (op1
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 0)));
5930 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5931 op1
= force_operand (op1
, target
);
5935 else if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
5936 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
5937 && TREE_CONSTANT (TREE_OPERAND (exp
, 0)))
5939 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
,
5941 if (! CONSTANT_P (op0
))
5943 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
5944 VOIDmode
, modifier
);
5945 /* Don't go to both_summands if modifier
5946 says it's not right to return a PLUS. */
5947 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5951 op0
= plus_constant (op0
, TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)));
5952 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5953 op0
= force_operand (op0
, target
);
5958 /* No sense saving up arithmetic to be done
5959 if it's all in the wrong mode to form part of an address.
5960 And force_operand won't know whether to sign-extend or
5962 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
5963 || mode
!= ptr_mode
)
5966 preexpand_calls (exp
);
5967 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
5970 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, modifier
);
5971 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, modifier
);
5974 /* Make sure any term that's a sum with a constant comes last. */
5975 if (GET_CODE (op0
) == PLUS
5976 && CONSTANT_P (XEXP (op0
, 1)))
5982 /* If adding to a sum including a constant,
5983 associate it to put the constant outside. */
5984 if (GET_CODE (op1
) == PLUS
5985 && CONSTANT_P (XEXP (op1
, 1)))
5987 rtx constant_term
= const0_rtx
;
5989 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
5992 /* Ensure that MULT comes first if there is one. */
5993 else if (GET_CODE (op0
) == MULT
)
5994 op0
= gen_rtx (PLUS
, mode
, op0
, XEXP (op1
, 0));
5996 op0
= gen_rtx (PLUS
, mode
, XEXP (op1
, 0), op0
);
5998 /* Let's also eliminate constants from op0 if possible. */
5999 op0
= eliminate_constant_term (op0
, &constant_term
);
6001 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6002 their sum should be a constant. Form it into OP1, since the
6003 result we want will then be OP0 + OP1. */
6005 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
6010 op1
= gen_rtx (PLUS
, mode
, constant_term
, XEXP (op1
, 1));
6013 /* Put a constant term last and put a multiplication first. */
6014 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
6015 temp
= op1
, op1
= op0
, op0
= temp
;
6017 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
6018 return temp
? temp
: gen_rtx (PLUS
, mode
, op0
, op1
);
6021 /* For initializers, we are allowed to return a MINUS of two
6022 symbolic constants. Here we handle all cases when both operands
6024 /* Handle difference of two symbolic constants,
6025 for the sake of an initializer. */
6026 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6027 && really_constant_p (TREE_OPERAND (exp
, 0))
6028 && really_constant_p (TREE_OPERAND (exp
, 1)))
6030 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
,
6031 VOIDmode
, modifier
);
6032 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6033 VOIDmode
, modifier
);
6035 /* If the last operand is a CONST_INT, use plus_constant of
6036 the negated constant. Else make the MINUS. */
6037 if (GET_CODE (op1
) == CONST_INT
)
6038 return plus_constant (op0
, - INTVAL (op1
));
6040 return gen_rtx (MINUS
, mode
, op0
, op1
);
6042 /* Convert A - const to A + (-const). */
6043 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6045 tree negated
= fold (build1 (NEGATE_EXPR
, type
,
6046 TREE_OPERAND (exp
, 1)));
6048 /* Deal with the case where we can't negate the constant
6050 if (TREE_UNSIGNED (type
) || TREE_OVERFLOW (negated
))
6052 tree newtype
= signed_type (type
);
6053 tree newop0
= convert (newtype
, TREE_OPERAND (exp
, 0));
6054 tree newop1
= convert (newtype
, TREE_OPERAND (exp
, 1));
6055 tree newneg
= fold (build1 (NEGATE_EXPR
, newtype
, newop1
));
6057 if (! TREE_OVERFLOW (newneg
))
6058 return expand_expr (convert (type
,
6059 build (PLUS_EXPR
, newtype
,
6061 target
, tmode
, modifier
);
6065 exp
= build (PLUS_EXPR
, type
, TREE_OPERAND (exp
, 0), negated
);
6069 this_optab
= sub_optab
;
6073 preexpand_calls (exp
);
6074 /* If first operand is constant, swap them.
6075 Thus the following special case checks need only
6076 check the second operand. */
6077 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
6079 register tree t1
= TREE_OPERAND (exp
, 0);
6080 TREE_OPERAND (exp
, 0) = TREE_OPERAND (exp
, 1);
6081 TREE_OPERAND (exp
, 1) = t1
;
6084 /* Attempt to return something suitable for generating an
6085 indexed address, for machines that support that. */
6087 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
6088 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6089 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
6091 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, EXPAND_SUM
);
6093 /* Apply distributive law if OP0 is x+c. */
6094 if (GET_CODE (op0
) == PLUS
6095 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
6096 return gen_rtx (PLUS
, mode
,
6097 gen_rtx (MULT
, mode
, XEXP (op0
, 0),
6098 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))),
6099 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))
6100 * INTVAL (XEXP (op0
, 1))));
6102 if (GET_CODE (op0
) != REG
)
6103 op0
= force_operand (op0
, NULL_RTX
);
6104 if (GET_CODE (op0
) != REG
)
6105 op0
= copy_to_mode_reg (mode
, op0
);
6107 return gen_rtx (MULT
, mode
, op0
,
6108 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))));
6111 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6114 /* Check for multiplying things that have been extended
6115 from a narrower type. If this machine supports multiplying
6116 in that narrower type with a result in the desired type,
6117 do it that way, and avoid the explicit type-conversion. */
6118 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == NOP_EXPR
6119 && TREE_CODE (type
) == INTEGER_TYPE
6120 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6121 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6122 && ((TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
6123 && int_fits_type_p (TREE_OPERAND (exp
, 1),
6124 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6125 /* Don't use a widening multiply if a shift will do. */
6126 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
6127 > HOST_BITS_PER_WIDE_INT
)
6128 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1))) < 0))
6130 (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
6131 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6133 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
6134 /* If both operands are extended, they must either both
6135 be zero-extended or both be sign-extended. */
6136 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)))
6138 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))))))
6140 enum machine_mode innermode
6141 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)));
6142 optab other_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6143 ? smul_widen_optab
: umul_widen_optab
);
6144 this_optab
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
6145 ? umul_widen_optab
: smul_widen_optab
);
6146 if (mode
== GET_MODE_WIDER_MODE (innermode
))
6148 if (this_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
6150 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6151 NULL_RTX
, VOIDmode
, 0);
6152 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6153 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6156 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6157 NULL_RTX
, VOIDmode
, 0);
6160 else if (other_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
6161 && innermode
== word_mode
)
6164 op0
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6165 NULL_RTX
, VOIDmode
, 0);
6166 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
6167 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
,
6170 op1
= expand_expr (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0),
6171 NULL_RTX
, VOIDmode
, 0);
6172 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
6173 unsignedp
, OPTAB_LIB_WIDEN
);
6174 htem
= expand_mult_highpart_adjust (innermode
,
6175 gen_highpart (innermode
, temp
),
6177 gen_highpart (innermode
, temp
),
6179 emit_move_insn (gen_highpart (innermode
, temp
), htem
);
6184 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6185 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6186 return expand_mult (mode
, op0
, op1
, target
, unsignedp
);
6188 case TRUNC_DIV_EXPR
:
6189 case FLOOR_DIV_EXPR
:
6191 case ROUND_DIV_EXPR
:
6192 case EXACT_DIV_EXPR
:
6193 preexpand_calls (exp
);
6194 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6196 /* Possible optimization: compute the dividend with EXPAND_SUM
6197 then if the divisor is constant can optimize the case
6198 where some terms of the dividend have coeffs divisible by it. */
6199 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6200 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6201 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
6204 this_optab
= flodiv_optab
;
6207 case TRUNC_MOD_EXPR
:
6208 case FLOOR_MOD_EXPR
:
6210 case ROUND_MOD_EXPR
:
6211 preexpand_calls (exp
);
6212 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6214 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6215 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6216 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
6218 case FIX_ROUND_EXPR
:
6219 case FIX_FLOOR_EXPR
:
6221 abort (); /* Not used for C. */
6223 case FIX_TRUNC_EXPR
:
6224 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6226 target
= gen_reg_rtx (mode
);
6227 expand_fix (target
, op0
, unsignedp
);
6231 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
6233 target
= gen_reg_rtx (mode
);
6234 /* expand_float can't figure out what to do if FROM has VOIDmode.
6235 So give it the correct mode. With -O, cse will optimize this. */
6236 if (GET_MODE (op0
) == VOIDmode
)
6237 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6239 expand_float (target
, op0
,
6240 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))));
6244 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6245 temp
= expand_unop (mode
, neg_optab
, op0
, target
, 0);
6251 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6253 /* Handle complex values specially. */
6254 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_INT
6255 || GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
)
6256 return expand_complex_abs (mode
, op0
, target
, unsignedp
);
6258 /* Unsigned abs is simply the operand. Testing here means we don't
6259 risk generating incorrect code below. */
6260 if (TREE_UNSIGNED (type
))
6263 return expand_abs (mode
, op0
, target
, unsignedp
,
6264 safe_from_p (target
, TREE_OPERAND (exp
, 0)));
6268 target
= original_target
;
6269 if (target
== 0 || ! safe_from_p (target
, TREE_OPERAND (exp
, 1))
6270 || (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
6271 || GET_MODE (target
) != mode
6272 || (GET_CODE (target
) == REG
6273 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
6274 target
= gen_reg_rtx (mode
);
6275 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
6276 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6278 /* First try to do it with a special MIN or MAX instruction.
6279 If that does not win, use a conditional jump to select the proper
6281 this_optab
= (TREE_UNSIGNED (type
)
6282 ? (code
== MIN_EXPR
? umin_optab
: umax_optab
)
6283 : (code
== MIN_EXPR
? smin_optab
: smax_optab
));
6285 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
6290 /* At this point, a MEM target is no longer useful; we will get better
6293 if (GET_CODE (target
) == MEM
)
6294 target
= gen_reg_rtx (mode
);
6297 emit_move_insn (target
, op0
);
6299 op0
= gen_label_rtx ();
6301 /* If this mode is an integer too wide to compare properly,
6302 compare word by word. Rely on cse to optimize constant cases. */
6303 if (GET_MODE_CLASS (mode
) == MODE_INT
&& !can_compare_p (mode
))
6305 if (code
== MAX_EXPR
)
6306 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6307 target
, op1
, NULL_RTX
, op0
);
6309 do_jump_by_parts_greater_rtx (mode
, TREE_UNSIGNED (type
),
6310 op1
, target
, NULL_RTX
, op0
);
6311 emit_move_insn (target
, op1
);
6315 if (code
== MAX_EXPR
)
6316 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6317 ? compare_from_rtx (target
, op1
, GEU
, 1, mode
, NULL_RTX
, 0)
6318 : compare_from_rtx (target
, op1
, GE
, 0, mode
, NULL_RTX
, 0));
6320 temp
= (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1)))
6321 ? compare_from_rtx (target
, op1
, LEU
, 1, mode
, NULL_RTX
, 0)
6322 : compare_from_rtx (target
, op1
, LE
, 0, mode
, NULL_RTX
, 0));
6323 if (temp
== const0_rtx
)
6324 emit_move_insn (target
, op1
);
6325 else if (temp
!= const_true_rtx
)
6327 if (bcc_gen_fctn
[(int) GET_CODE (temp
)] != 0)
6328 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (temp
)]) (op0
));
6331 emit_move_insn (target
, op1
);
6338 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6339 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
6345 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6346 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
6351 /* ??? Can optimize bitwise operations with one arg constant.
6352 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6353 and (a bitwise1 b) bitwise2 b (etc)
6354 but that is probably not worth while. */
6356 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6357 boolean values when we want in all cases to compute both of them. In
6358 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6359 as actual zero-or-1 values and then bitwise anding. In cases where
6360 there cannot be any side effects, better code would be made by
6361 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6362 how to recognize those cases. */
6364 case TRUTH_AND_EXPR
:
6366 this_optab
= and_optab
;
6371 this_optab
= ior_optab
;
6374 case TRUTH_XOR_EXPR
:
6376 this_optab
= xor_optab
;
6383 preexpand_calls (exp
);
6384 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
6386 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
6387 return expand_shift (code
, mode
, op0
, TREE_OPERAND (exp
, 1), target
,
6390 /* Could determine the answer when only additive constants differ. Also,
6391 the addition of one can be handled by changing the condition. */
6398 preexpand_calls (exp
);
6399 temp
= do_store_flag (exp
, target
, tmode
!= VOIDmode
? tmode
: mode
, 0);
6403 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6404 if (code
== NE_EXPR
&& integer_zerop (TREE_OPERAND (exp
, 1))
6406 && GET_CODE (original_target
) == REG
6407 && (GET_MODE (original_target
)
6408 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6410 temp
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
6413 if (temp
!= original_target
)
6414 temp
= copy_to_reg (temp
);
6416 op1
= gen_label_rtx ();
6417 emit_cmp_insn (temp
, const0_rtx
, EQ
, NULL_RTX
,
6418 GET_MODE (temp
), unsignedp
, 0);
6419 emit_jump_insn (gen_beq (op1
));
6420 emit_move_insn (temp
, const1_rtx
);
6425 /* If no set-flag instruction, must generate a conditional
6426 store into a temporary variable. Drop through
6427 and handle this like && and ||. */
6429 case TRUTH_ANDIF_EXPR
:
6430 case TRUTH_ORIF_EXPR
:
6432 && (target
== 0 || ! safe_from_p (target
, exp
)
6433 /* Make sure we don't have a hard reg (such as function's return
6434 value) live across basic blocks, if not optimizing. */
6435 || (!optimize
&& GET_CODE (target
) == REG
6436 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
6437 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
6440 emit_clr_insn (target
);
6442 op1
= gen_label_rtx ();
6443 jumpifnot (exp
, op1
);
6446 emit_0_to_1_insn (target
);
6449 return ignore
? const0_rtx
: target
;
6451 case TRUTH_NOT_EXPR
:
6452 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, VOIDmode
, 0);
6453 /* The parser is careful to generate TRUTH_NOT_EXPR
6454 only with operands that are always zero or one. */
6455 temp
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
,
6456 target
, 1, OPTAB_LIB_WIDEN
);
6462 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
6464 return expand_expr (TREE_OPERAND (exp
, 1),
6465 (ignore
? const0_rtx
: target
),
6469 /* If we would have a "singleton" (see below) were it not for a
6470 conversion in each arm, bring that conversion back out. */
6471 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
6472 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
6473 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
6474 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
6476 tree
true = TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
6477 tree
false = TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
6479 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6480 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6481 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6482 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6483 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6484 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6485 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6486 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6487 return expand_expr (build1 (NOP_EXPR
, type
,
6488 build (COND_EXPR
, TREE_TYPE (true),
6489 TREE_OPERAND (exp
, 0),
6491 target
, tmode
, modifier
);
6495 /* Note that COND_EXPRs whose type is a structure or union
6496 are required to be constructed to contain assignments of
6497 a temporary variable, so that we can evaluate them here
6498 for side effect only. If type is void, we must do likewise. */
6500 /* If an arm of the branch requires a cleanup,
6501 only that cleanup is performed. */
6504 tree binary_op
= 0, unary_op
= 0;
6506 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6507 convert it to our mode, if necessary. */
6508 if (integer_onep (TREE_OPERAND (exp
, 1))
6509 && integer_zerop (TREE_OPERAND (exp
, 2))
6510 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6514 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
6519 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, modifier
);
6520 if (GET_MODE (op0
) == mode
)
6524 target
= gen_reg_rtx (mode
);
6525 convert_move (target
, op0
, unsignedp
);
6529 /* Check for X ? A + B : A. If we have this, we can copy A to the
6530 output and conditionally add B. Similarly for unary operations.
6531 Don't do this if X has side-effects because those side effects
6532 might affect A or B and the "?" operation is a sequence point in
6533 ANSI. (operand_equal_p tests for side effects.) */
6535 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
6536 && operand_equal_p (TREE_OPERAND (exp
, 2),
6537 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6538 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
6539 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
6540 && operand_equal_p (TREE_OPERAND (exp
, 1),
6541 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6542 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
6543 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
6544 && operand_equal_p (TREE_OPERAND (exp
, 2),
6545 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
6546 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
6547 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
6548 && operand_equal_p (TREE_OPERAND (exp
, 1),
6549 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
6550 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
6552 /* If we are not to produce a result, we have no target. Otherwise,
6553 if a target was specified use it; it will not be used as an
6554 intermediate target unless it is safe. If no target, use a
6559 else if (original_target
6560 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0))
6561 || (singleton
&& GET_CODE (original_target
) == REG
6562 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
6563 && original_target
== var_rtx (singleton
)))
6564 && GET_MODE (original_target
) == mode
6565 && ! (GET_CODE (original_target
) == MEM
6566 && MEM_VOLATILE_P (original_target
)))
6567 temp
= original_target
;
6568 else if (TREE_ADDRESSABLE (type
))
6571 temp
= assign_temp (type
, 0, 0, 1);
6573 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6574 do the test of X as a store-flag operation, do this as
6575 A + ((X != 0) << log C). Similarly for other simple binary
6576 operators. Only do for C == 1 if BRANCH_COST is low. */
6577 if (temp
&& singleton
&& binary_op
6578 && (TREE_CODE (binary_op
) == PLUS_EXPR
6579 || TREE_CODE (binary_op
) == MINUS_EXPR
6580 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
6581 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
6582 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
6583 : integer_onep (TREE_OPERAND (binary_op
, 1)))
6584 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
6587 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
? add_optab
6588 : TREE_CODE (binary_op
) == MINUS_EXPR
? sub_optab
6589 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
6592 /* If we had X ? A : A + 1, do this as A + (X == 0).
6594 We have to invert the truth value here and then put it
6595 back later if do_store_flag fails. We cannot simply copy
6596 TREE_OPERAND (exp, 0) to another variable and modify that
6597 because invert_truthvalue can modify the tree pointed to
6599 if (singleton
== TREE_OPERAND (exp
, 1))
6600 TREE_OPERAND (exp
, 0)
6601 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6603 result
= do_store_flag (TREE_OPERAND (exp
, 0),
6604 (safe_from_p (temp
, singleton
)
6606 mode
, BRANCH_COST
<= 1);
6608 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
6609 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
6610 build_int_2 (tree_log2
6614 (safe_from_p (temp
, singleton
)
6615 ? temp
: NULL_RTX
), 0);
6619 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
6620 return expand_binop (mode
, boptab
, op1
, result
, temp
,
6621 unsignedp
, OPTAB_LIB_WIDEN
);
6623 else if (singleton
== TREE_OPERAND (exp
, 1))
6624 TREE_OPERAND (exp
, 0)
6625 = invert_truthvalue (TREE_OPERAND (exp
, 0));
6628 do_pending_stack_adjust ();
6630 op0
= gen_label_rtx ();
6632 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
6636 /* If the target conflicts with the other operand of the
6637 binary op, we can't use it. Also, we can't use the target
6638 if it is a hard register, because evaluating the condition
6639 might clobber it. */
6641 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1)))
6642 || (GET_CODE (temp
) == REG
6643 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
6644 temp
= gen_reg_rtx (mode
);
6645 store_expr (singleton
, temp
, 0);
6648 expand_expr (singleton
,
6649 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6650 if (singleton
== TREE_OPERAND (exp
, 1))
6651 jumpif (TREE_OPERAND (exp
, 0), op0
);
6653 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6655 start_cleanup_deferal ();
6656 if (binary_op
&& temp
== 0)
6657 /* Just touch the other operand. */
6658 expand_expr (TREE_OPERAND (binary_op
, 1),
6659 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6661 store_expr (build (TREE_CODE (binary_op
), type
,
6662 make_tree (type
, temp
),
6663 TREE_OPERAND (binary_op
, 1)),
6666 store_expr (build1 (TREE_CODE (unary_op
), type
,
6667 make_tree (type
, temp
)),
6671 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6672 comparison operator. If we have one of these cases, set the
6673 output to A, branch on A (cse will merge these two references),
6674 then set the output to FOO. */
6676 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6677 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6678 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6679 TREE_OPERAND (exp
, 1), 0)
6680 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6681 && safe_from_p (temp
, TREE_OPERAND (exp
, 2)))
6683 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6684 temp
= gen_reg_rtx (mode
);
6685 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6686 jumpif (TREE_OPERAND (exp
, 0), op0
);
6688 start_cleanup_deferal ();
6689 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6693 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
6694 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
6695 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
6696 TREE_OPERAND (exp
, 2), 0)
6697 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
6698 && safe_from_p (temp
, TREE_OPERAND (exp
, 1)))
6700 if (GET_CODE (temp
) == REG
&& REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
6701 temp
= gen_reg_rtx (mode
);
6702 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6703 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6705 start_cleanup_deferal ();
6706 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6711 op1
= gen_label_rtx ();
6712 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
6714 start_cleanup_deferal ();
6716 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
6718 expand_expr (TREE_OPERAND (exp
, 1),
6719 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6720 end_cleanup_deferal ();
6722 emit_jump_insn (gen_jump (op1
));
6725 start_cleanup_deferal ();
6727 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
6729 expand_expr (TREE_OPERAND (exp
, 2),
6730 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
6733 end_cleanup_deferal ();
6744 /* Something needs to be initialized, but we didn't know
6745 where that thing was when building the tree. For example,
6746 it could be the return value of a function, or a parameter
6747 to a function which lays down in the stack, or a temporary
6748 variable which must be passed by reference.
6750 We guarantee that the expression will either be constructed
6751 or copied into our original target. */
6753 tree slot
= TREE_OPERAND (exp
, 0);
6754 tree cleanups
= NULL_TREE
;
6758 if (TREE_CODE (slot
) != VAR_DECL
)
6762 target
= original_target
;
6766 if (DECL_RTL (slot
) != 0)
6768 target
= DECL_RTL (slot
);
6769 /* If we have already expanded the slot, so don't do
6771 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6776 target
= assign_temp (type
, 2, 1, 1);
6777 /* All temp slots at this level must not conflict. */
6778 preserve_temp_slots (target
);
6779 DECL_RTL (slot
) = target
;
6781 /* Since SLOT is not known to the called function
6782 to belong to its stack frame, we must build an explicit
6783 cleanup. This case occurs when we must build up a reference
6784 to pass the reference as an argument. In this case,
6785 it is very likely that such a reference need not be
6788 if (TREE_OPERAND (exp
, 2) == 0)
6789 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
6790 cleanups
= TREE_OPERAND (exp
, 2);
6795 /* This case does occur, when expanding a parameter which
6796 needs to be constructed on the stack. The target
6797 is the actual stack address that we want to initialize.
6798 The function we call will perform the cleanup in this case. */
6800 /* If we have already assigned it space, use that space,
6801 not target that we were passed in, as our target
6802 parameter is only a hint. */
6803 if (DECL_RTL (slot
) != 0)
6805 target
= DECL_RTL (slot
);
6806 /* If we have already expanded the slot, so don't do
6808 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
6812 DECL_RTL (slot
) = target
;
6815 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
6816 /* Mark it as expanded. */
6817 TREE_OPERAND (exp
, 1) = NULL_TREE
;
6819 store_expr (exp1
, target
, 0);
6821 expand_decl_cleanup (NULL_TREE
, cleanups
);
6828 tree lhs
= TREE_OPERAND (exp
, 0);
6829 tree rhs
= TREE_OPERAND (exp
, 1);
6830 tree noncopied_parts
= 0;
6831 tree lhs_type
= TREE_TYPE (lhs
);
6833 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
6834 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
6835 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
6836 TYPE_NONCOPIED_PARTS (lhs_type
));
6837 while (noncopied_parts
!= 0)
6839 expand_assignment (TREE_VALUE (noncopied_parts
),
6840 TREE_PURPOSE (noncopied_parts
), 0, 0);
6841 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
6848 /* If lhs is complex, expand calls in rhs before computing it.
6849 That's so we don't compute a pointer and save it over a call.
6850 If lhs is simple, compute it first so we can give it as a
6851 target if the rhs is just a call. This avoids an extra temp and copy
6852 and that prevents a partial-subsumption which makes bad code.
6853 Actually we could treat component_ref's of vars like vars. */
6855 tree lhs
= TREE_OPERAND (exp
, 0);
6856 tree rhs
= TREE_OPERAND (exp
, 1);
6857 tree noncopied_parts
= 0;
6858 tree lhs_type
= TREE_TYPE (lhs
);
6862 if (TREE_CODE (lhs
) != VAR_DECL
6863 && TREE_CODE (lhs
) != RESULT_DECL
6864 && TREE_CODE (lhs
) != PARM_DECL
6865 && ! (TREE_CODE (lhs
) == INDIRECT_REF
6866 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs
, 0)))))
6867 preexpand_calls (exp
);
6869 /* Check for |= or &= of a bitfield of size one into another bitfield
6870 of size 1. In this case, (unless we need the result of the
6871 assignment) we can do this more efficiently with a
6872 test followed by an assignment, if necessary.
6874 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6875 things change so we do, this code should be enhanced to
6878 && TREE_CODE (lhs
) == COMPONENT_REF
6879 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
6880 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
6881 && TREE_OPERAND (rhs
, 0) == lhs
6882 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
6883 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs
, 1))) == 1
6884 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))) == 1)
6886 rtx label
= gen_label_rtx ();
6888 do_jump (TREE_OPERAND (rhs
, 1),
6889 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
6890 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
6891 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
6892 (TREE_CODE (rhs
) == BIT_IOR_EXPR
6894 : integer_zero_node
)),
6896 do_pending_stack_adjust ();
6901 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
6902 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
6903 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
6904 TYPE_NONCOPIED_PARTS (lhs_type
));
6906 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
6907 while (noncopied_parts
!= 0)
6909 expand_assignment (TREE_PURPOSE (noncopied_parts
),
6910 TREE_VALUE (noncopied_parts
), 0, 0);
6911 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
6916 case PREINCREMENT_EXPR
:
6917 case PREDECREMENT_EXPR
:
6918 return expand_increment (exp
, 0, ignore
);
6920 case POSTINCREMENT_EXPR
:
6921 case POSTDECREMENT_EXPR
:
6922 /* Faster to treat as pre-increment if result is not used. */
6923 return expand_increment (exp
, ! ignore
, ignore
);
6926 /* If nonzero, TEMP will be set to the address of something that might
6927 be a MEM corresponding to a stack slot. */
6930 /* Are we taking the address of a nested function? */
6931 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
6932 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
6933 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0)))
6935 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
6936 op0
= force_operand (op0
, target
);
6938 /* If we are taking the address of something erroneous, just
6940 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
6944 /* We make sure to pass const0_rtx down if we came in with
6945 ignore set, to avoid doing the cleanups twice for something. */
6946 op0
= expand_expr (TREE_OPERAND (exp
, 0),
6947 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
6948 (modifier
== EXPAND_INITIALIZER
6949 ? modifier
: EXPAND_CONST_ADDRESS
));
6951 /* If we are going to ignore the result, OP0 will have been set
6952 to const0_rtx, so just return it. Don't get confused and
6953 think we are taking the address of the constant. */
6957 op0
= protect_from_queue (op0
, 0);
6959 /* We would like the object in memory. If it is a constant,
6960 we can have it be statically allocated into memory. For
6961 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6962 memory and store the value into it. */
6964 if (CONSTANT_P (op0
))
6965 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6967 else if (GET_CODE (op0
) == MEM
)
6969 mark_temp_addr_taken (op0
);
6970 temp
= XEXP (op0
, 0);
6973 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6974 || GET_CODE (op0
) == CONCAT
)
6976 /* If this object is in a register, it must be not
6978 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
6979 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
6981 mark_temp_addr_taken (memloc
);
6982 emit_move_insn (memloc
, op0
);
6986 if (GET_CODE (op0
) != MEM
)
6989 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6991 temp
= XEXP (op0
, 0);
6992 #ifdef POINTERS_EXTEND_UNSIGNED
6993 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
6994 && mode
== ptr_mode
)
6995 temp
= convert_memory_address (ptr_mode
, temp
);
7000 op0
= force_operand (XEXP (op0
, 0), target
);
7003 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
7004 op0
= force_reg (Pmode
, op0
);
7006 if (GET_CODE (op0
) == REG
7007 && ! REG_USERVAR_P (op0
))
7008 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
7010 /* If we might have had a temp slot, add an equivalent address
7013 update_temp_slot_address (temp
, op0
);
7015 #ifdef POINTERS_EXTEND_UNSIGNED
7016 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
7017 && mode
== ptr_mode
)
7018 op0
= convert_memory_address (ptr_mode
, op0
);
7023 case ENTRY_VALUE_EXPR
:
7026 /* COMPLEX type for Extended Pascal & Fortran */
7029 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7032 /* Get the rtx code of the operands. */
7033 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7034 op1
= expand_expr (TREE_OPERAND (exp
, 1), 0, VOIDmode
, 0);
7037 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7041 /* Move the real (op0) and imaginary (op1) parts to their location. */
7042 emit_move_insn (gen_realpart (mode
, target
), op0
);
7043 emit_move_insn (gen_imagpart (mode
, target
), op1
);
7045 insns
= get_insns ();
7048 /* Complex construction should appear as a single unit. */
7049 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7050 each with a separate pseudo as destination.
7051 It's not correct for flow to treat them as a unit. */
7052 if (GET_CODE (target
) != CONCAT
)
7053 emit_no_conflict_block (insns
, target
, op0
, op1
, NULL_RTX
);
7061 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7062 return gen_realpart (mode
, op0
);
7065 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7066 return gen_imagpart (mode
, op0
);
7070 enum machine_mode partmode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7074 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7077 target
= gen_reg_rtx (mode
);
7081 /* Store the realpart and the negated imagpart to target. */
7082 emit_move_insn (gen_realpart (partmode
, target
),
7083 gen_realpart (partmode
, op0
));
7085 imag_t
= gen_imagpart (partmode
, target
);
7086 temp
= expand_unop (partmode
, neg_optab
,
7087 gen_imagpart (partmode
, op0
), imag_t
, 0);
7089 emit_move_insn (imag_t
, temp
);
7091 insns
= get_insns ();
7094 /* Conjugate should appear as a single unit
7095 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7096 each with a separate pseudo as destination.
7097 It's not correct for flow to treat them as a unit. */
7098 if (GET_CODE (target
) != CONCAT
)
7099 emit_no_conflict_block (insns
, target
, op0
, NULL_RTX
, NULL_RTX
);
7106 case TRY_CATCH_EXPR
:
7108 tree handler
= TREE_OPERAND (exp
, 1);
7110 expand_eh_region_start ();
7112 op0
= expand_expr (TREE_OPERAND (exp
, 0), 0, VOIDmode
, 0);
7114 expand_eh_region_end (handler
);
7121 rtx dcc
= get_dynamic_cleanup_chain ();
7122 emit_move_insn (dcc
, validize_mem (gen_rtx (MEM
, Pmode
, dcc
)));
7128 rtx dhc
= get_dynamic_handler_chain ();
7129 emit_move_insn (dhc
, validize_mem (gen_rtx (MEM
, Pmode
, dhc
)));
7134 op0
= CONST0_RTX (tmode
);
7140 return (*lang_expand_expr
) (exp
, original_target
, tmode
, modifier
);
7143 /* Here to do an ordinary binary operator, generating an instruction
7144 from the optab already placed in `this_optab'. */
7146 preexpand_calls (exp
);
7147 if (! safe_from_p (subtarget
, TREE_OPERAND (exp
, 1)))
7149 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7150 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
7152 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
7153 unsignedp
, OPTAB_LIB_WIDEN
);
7160 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7163 bc_expand_expr (exp
)
7166 enum tree_code code
;
7169 struct binary_operator
*binoptab
;
7170 struct unary_operator
*unoptab
;
7171 struct increment_operator
*incroptab
;
7172 struct bc_label
*lab
, *lab1
;
7173 enum bytecode_opcode opcode
;
7176 code
= TREE_CODE (exp
);
7182 if (DECL_RTL (exp
) == 0)
7184 error_with_decl (exp
, "prior parameter's size depends on `%s'");
7188 bc_load_parmaddr (DECL_RTL (exp
));
7189 bc_load_memory (TREE_TYPE (exp
), exp
);
7195 if (DECL_RTL (exp
) == 0)
7199 if (BYTECODE_LABEL (DECL_RTL (exp
)))
7200 bc_load_externaddr (DECL_RTL (exp
));
7202 bc_load_localaddr (DECL_RTL (exp
));
7204 if (TREE_PUBLIC (exp
))
7205 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
7206 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
7208 bc_load_localaddr (DECL_RTL (exp
));
7210 bc_load_memory (TREE_TYPE (exp
), exp
);
7215 #ifdef DEBUG_PRINT_CODE
7216 fprintf (stderr
, " [%x]\n", TREE_INT_CST_LOW (exp
));
7218 bc_emit_instruction (mode_to_const_map
[(int) (DECL_BIT_FIELD (exp
)
7220 : TYPE_MODE (TREE_TYPE (exp
)))],
7221 (HOST_WIDE_INT
) TREE_INT_CST_LOW (exp
));
7227 #ifdef DEBUG_PRINT_CODE
7228 fprintf (stderr
, " [%g]\n", (double) TREE_INT_CST_LOW (exp
));
7230 /* FIX THIS: find a better way to pass real_cst's. -bson */
7231 bc_emit_instruction (mode_to_const_map
[TYPE_MODE (TREE_TYPE (exp
))],
7232 (double) TREE_REAL_CST (exp
));
7241 /* We build a call description vector describing the type of
7242 the return value and of the arguments; this call vector,
7243 together with a pointer to a location for the return value
7244 and the base of the argument list, is passed to the low
7245 level machine dependent call subroutine, which is responsible
7246 for putting the arguments wherever real functions expect
7247 them, as well as getting the return value back. */
7249 tree calldesc
= 0, arg
;
7253 /* Push the evaluated args on the evaluation stack in reverse
7254 order. Also make an entry for each arg in the calldesc
7255 vector while we're at it. */
7257 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7259 for (arg
= TREE_OPERAND (exp
, 1); arg
; arg
= TREE_CHAIN (arg
))
7262 bc_expand_expr (TREE_VALUE (arg
));
7264 calldesc
= tree_cons ((tree
) 0,
7265 size_in_bytes (TREE_TYPE (TREE_VALUE (arg
))),
7267 calldesc
= tree_cons ((tree
) 0,
7268 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg
))),
7272 TREE_OPERAND (exp
, 1) = nreverse (TREE_OPERAND (exp
, 1));
7274 /* Allocate a location for the return value and push its
7275 address on the evaluation stack. Also make an entry
7276 at the front of the calldesc for the return value type. */
7278 type
= TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
7279 retval
= bc_allocate_local (int_size_in_bytes (type
), TYPE_ALIGN (type
));
7280 bc_load_localaddr (retval
);
7282 calldesc
= tree_cons ((tree
) 0, size_in_bytes (type
), calldesc
);
7283 calldesc
= tree_cons ((tree
) 0, bc_runtime_type_code (type
), calldesc
);
7285 /* Prepend the argument count. */
7286 calldesc
= tree_cons ((tree
) 0,
7287 build_int_2 (nargs
, 0),
7290 /* Push the address of the call description vector on the stack. */
7291 calldesc
= build_nt (CONSTRUCTOR
, (tree
) 0, calldesc
);
7292 TREE_TYPE (calldesc
) = build_array_type (integer_type_node
,
7293 build_index_type (build_int_2 (nargs
* 2, 0)));
7294 r
= output_constant_def (calldesc
);
7295 bc_load_externaddr (r
);
7297 /* Push the address of the function to be called. */
7298 bc_expand_expr (TREE_OPERAND (exp
, 0));
7300 /* Call the function, popping its address and the calldesc vector
7301 address off the evaluation stack in the process. */
7302 bc_emit_instruction (call
);
7304 /* Pop the arguments off the stack. */
7305 bc_adjust_stack (nargs
);
7307 /* Load the return value onto the stack. */
7308 bc_load_localaddr (retval
);
7309 bc_load_memory (type
, TREE_OPERAND (exp
, 0));
7315 if (!SAVE_EXPR_RTL (exp
))
7317 /* First time around: copy to local variable */
7318 SAVE_EXPR_RTL (exp
) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp
)),
7319 TYPE_ALIGN (TREE_TYPE(exp
)));
7320 bc_expand_expr (TREE_OPERAND (exp
, 0));
7321 bc_emit_instruction (duplicate
);
7323 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7324 bc_store_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7328 /* Consecutive reference: use saved copy */
7329 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7330 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7335 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7336 how are they handled instead? */
7339 TREE_USED (exp
) = 1;
7340 bc_expand_expr (STMT_BODY (exp
));
7347 bc_expand_expr (TREE_OPERAND (exp
, 0));
7348 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)), TREE_TYPE (exp
));
7353 expand_assignment (TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1), 0, 0);
7358 bc_expand_address (TREE_OPERAND (exp
, 0));
7363 bc_expand_expr (TREE_OPERAND (exp
, 0));
7364 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7369 bc_expand_expr (bc_canonicalize_array_ref (exp
));
7374 bc_expand_component_address (exp
);
7376 /* If we have a bitfield, generate a proper load */
7377 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp
, 1)), TREE_OPERAND (exp
, 1));
7382 bc_expand_expr (TREE_OPERAND (exp
, 0));
7383 bc_emit_instruction (drop
);
7384 bc_expand_expr (TREE_OPERAND (exp
, 1));
7389 bc_expand_expr (TREE_OPERAND (exp
, 0));
7390 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7391 lab
= bc_get_bytecode_label ();
7392 bc_emit_bytecode (xjumpifnot
);
7393 bc_emit_bytecode_labelref (lab
);
7395 #ifdef DEBUG_PRINT_CODE
7396 fputc ('\n', stderr
);
7398 bc_expand_expr (TREE_OPERAND (exp
, 1));
7399 lab1
= bc_get_bytecode_label ();
7400 bc_emit_bytecode (jump
);
7401 bc_emit_bytecode_labelref (lab1
);
7403 #ifdef DEBUG_PRINT_CODE
7404 fputc ('\n', stderr
);
7407 bc_emit_bytecode_labeldef (lab
);
7408 bc_expand_expr (TREE_OPERAND (exp
, 2));
7409 bc_emit_bytecode_labeldef (lab1
);
7412 case TRUTH_ANDIF_EXPR
:
7414 opcode
= xjumpifnot
;
7417 case TRUTH_ORIF_EXPR
:
7424 binoptab
= optab_plus_expr
;
7429 binoptab
= optab_minus_expr
;
7434 binoptab
= optab_mult_expr
;
7437 case TRUNC_DIV_EXPR
:
7438 case FLOOR_DIV_EXPR
:
7440 case ROUND_DIV_EXPR
:
7441 case EXACT_DIV_EXPR
:
7443 binoptab
= optab_trunc_div_expr
;
7446 case TRUNC_MOD_EXPR
:
7447 case FLOOR_MOD_EXPR
:
7449 case ROUND_MOD_EXPR
:
7451 binoptab
= optab_trunc_mod_expr
;
7454 case FIX_ROUND_EXPR
:
7455 case FIX_FLOOR_EXPR
:
7457 abort (); /* Not used for C. */
7459 case FIX_TRUNC_EXPR
:
7466 abort (); /* FIXME */
7470 binoptab
= optab_rdiv_expr
;
7475 binoptab
= optab_bit_and_expr
;
7480 binoptab
= optab_bit_ior_expr
;
7485 binoptab
= optab_bit_xor_expr
;
7490 binoptab
= optab_lshift_expr
;
7495 binoptab
= optab_rshift_expr
;
7498 case TRUTH_AND_EXPR
:
7500 binoptab
= optab_truth_and_expr
;
7505 binoptab
= optab_truth_or_expr
;
7510 binoptab
= optab_lt_expr
;
7515 binoptab
= optab_le_expr
;
7520 binoptab
= optab_ge_expr
;
7525 binoptab
= optab_gt_expr
;
7530 binoptab
= optab_eq_expr
;
7535 binoptab
= optab_ne_expr
;
7540 unoptab
= optab_negate_expr
;
7545 unoptab
= optab_bit_not_expr
;
7548 case TRUTH_NOT_EXPR
:
7550 unoptab
= optab_truth_not_expr
;
7553 case PREDECREMENT_EXPR
:
7555 incroptab
= optab_predecrement_expr
;
7558 case PREINCREMENT_EXPR
:
7560 incroptab
= optab_preincrement_expr
;
7563 case POSTDECREMENT_EXPR
:
7565 incroptab
= optab_postdecrement_expr
;
7568 case POSTINCREMENT_EXPR
:
7570 incroptab
= optab_postincrement_expr
;
7575 bc_expand_constructor (exp
);
7585 tree vars
= TREE_OPERAND (exp
, 0);
7586 int vars_need_expansion
= 0;
7588 /* Need to open a binding contour here because
7589 if there are any cleanups they must be contained here. */
7590 expand_start_bindings (0);
7592 /* Mark the corresponding BLOCK for output. */
7593 if (TREE_OPERAND (exp
, 2) != 0)
7594 TREE_USED (TREE_OPERAND (exp
, 2)) = 1;
7596 /* If VARS have not yet been expanded, expand them now. */
7599 if (DECL_RTL (vars
) == 0)
7601 vars_need_expansion
= 1;
7604 expand_decl_init (vars
);
7605 vars
= TREE_CHAIN (vars
);
7608 bc_expand_expr (TREE_OPERAND (exp
, 1));
7610 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
7620 bc_expand_binary_operation (binoptab
, TREE_TYPE (exp
),
7621 TREE_OPERAND (exp
, 0), TREE_OPERAND (exp
, 1));
7627 bc_expand_unary_operation (unoptab
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7633 bc_expand_expr (TREE_OPERAND (exp
, 0));
7634 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 0)));
7635 lab
= bc_get_bytecode_label ();
7637 bc_emit_instruction (duplicate
);
7638 bc_emit_bytecode (opcode
);
7639 bc_emit_bytecode_labelref (lab
);
7641 #ifdef DEBUG_PRINT_CODE
7642 fputc ('\n', stderr
);
7645 bc_emit_instruction (drop
);
7647 bc_expand_expr (TREE_OPERAND (exp
, 1));
7648 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)));
7649 bc_emit_bytecode_labeldef (lab
);
7655 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7657 /* Push the quantum. */
7658 bc_expand_expr (TREE_OPERAND (exp
, 1));
7660 /* Convert it to the lvalue's type. */
7661 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp
, 1)), type
);
7663 /* Push the address of the lvalue */
7664 bc_expand_expr (build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
), TREE_OPERAND (exp
, 0)));
7666 /* Perform actual increment */
7667 bc_expand_increment (incroptab
, type
);
7671 /* Return the alignment in bits of EXP, a pointer valued expression.
7672 But don't return more than MAX_ALIGN no matter what.
7673 The alignment returned is, by default, the alignment of the thing that
7674 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7676 Otherwise, look at the expression to see if we can do better, i.e., if the
7677 expression is actually pointing at an object whose alignment is tighter. */
7680 get_pointer_alignment (exp
, max_align
)
7684 unsigned align
, inner
;
7686 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
7689 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7690 align
= MIN (align
, max_align
);
7694 switch (TREE_CODE (exp
))
7698 case NON_LVALUE_EXPR
:
7699 exp
= TREE_OPERAND (exp
, 0);
7700 if (TREE_CODE (TREE_TYPE (exp
)) != POINTER_TYPE
)
7702 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
7703 align
= MIN (inner
, max_align
);
7707 /* If sum of pointer + int, restrict our maximum alignment to that
7708 imposed by the integer. If not, we can't do any better than
7710 if (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
)
7713 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
)
7718 exp
= TREE_OPERAND (exp
, 0);
7722 /* See what we are pointing at and look at its alignment. */
7723 exp
= TREE_OPERAND (exp
, 0);
7724 if (TREE_CODE (exp
) == FUNCTION_DECL
)
7725 align
= FUNCTION_BOUNDARY
;
7726 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
7727 align
= DECL_ALIGN (exp
);
7728 #ifdef CONSTANT_ALIGNMENT
7729 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c')
7730 align
= CONSTANT_ALIGNMENT (exp
, align
);
7732 return MIN (align
, max_align
);
7740 /* Return the tree node and offset if a given argument corresponds to
7741 a string constant. */
7744 string_constant (arg
, ptr_offset
)
7750 if (TREE_CODE (arg
) == ADDR_EXPR
7751 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
7753 *ptr_offset
= integer_zero_node
;
7754 return TREE_OPERAND (arg
, 0);
7756 else if (TREE_CODE (arg
) == PLUS_EXPR
)
7758 tree arg0
= TREE_OPERAND (arg
, 0);
7759 tree arg1
= TREE_OPERAND (arg
, 1);
7764 if (TREE_CODE (arg0
) == ADDR_EXPR
7765 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
)
7768 return TREE_OPERAND (arg0
, 0);
7770 else if (TREE_CODE (arg1
) == ADDR_EXPR
7771 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
)
7774 return TREE_OPERAND (arg1
, 0);
7781 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7782 way, because it could contain a zero byte in the middle.
7783 TREE_STRING_LENGTH is the size of the character array, not the string.
7785 Unfortunately, string_constant can't access the values of const char
7786 arrays with initializers, so neither can we do so here. */
7796 src
= string_constant (src
, &offset_node
);
7799 max
= TREE_STRING_LENGTH (src
);
7800 ptr
= TREE_STRING_POINTER (src
);
7801 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
7803 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7804 compute the offset to the following null if we don't know where to
7805 start searching for it. */
7807 for (i
= 0; i
< max
; i
++)
7810 /* We don't know the starting offset, but we do know that the string
7811 has no internal zero bytes. We can assume that the offset falls
7812 within the bounds of the string; otherwise, the programmer deserves
7813 what he gets. Subtract the offset from the length of the string,
7815 /* This would perhaps not be valid if we were dealing with named
7816 arrays in addition to literal string constants. */
7817 return size_binop (MINUS_EXPR
, size_int (max
), offset_node
);
7820 /* We have a known offset into the string. Start searching there for
7821 a null character. */
7822 if (offset_node
== 0)
7826 /* Did we get a long long offset? If so, punt. */
7827 if (TREE_INT_CST_HIGH (offset_node
) != 0)
7829 offset
= TREE_INT_CST_LOW (offset_node
);
7831 /* If the offset is known to be out of bounds, warn, and call strlen at
7833 if (offset
< 0 || offset
> max
)
7835 warning ("offset outside bounds of constant string");
7838 /* Use strlen to search for the first zero byte. Since any strings
7839 constructed with build_string will have nulls appended, we win even
7840 if we get handed something like (char[4])"abcd".
7842 Since OFFSET is our starting index into the string, no further
7843 calculation is needed. */
7844 return size_int (strlen (ptr
+ offset
));
7848 expand_builtin_return_addr (fndecl_code
, count
, tem
)
7849 enum built_in_function fndecl_code
;
7855 /* Some machines need special handling before we can access
7856 arbitrary frames. For example, on the sparc, we must first flush
7857 all register windows to the stack. */
7858 #ifdef SETUP_FRAME_ADDRESSES
7859 SETUP_FRAME_ADDRESSES ();
7862 /* On the sparc, the return address is not in the frame, it is in a
7863 register. There is no way to access it off of the current frame
7864 pointer, but it can be accessed off the previous frame pointer by
7865 reading the value from the register window save area. */
7866 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7867 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
7871 /* Scan back COUNT frames to the specified frame. */
7872 for (i
= 0; i
< count
; i
++)
7874 /* Assume the dynamic chain pointer is in the word that the
7875 frame address points to, unless otherwise specified. */
7876 #ifdef DYNAMIC_CHAIN_ADDRESS
7877 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
7879 tem
= memory_address (Pmode
, tem
);
7880 tem
= copy_to_reg (gen_rtx (MEM
, Pmode
, tem
));
7883 /* For __builtin_frame_address, return what we've got. */
7884 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
7887 /* For __builtin_return_address, Get the return address from that
7889 #ifdef RETURN_ADDR_RTX
7890 tem
= RETURN_ADDR_RTX (count
, tem
);
7892 tem
= memory_address (Pmode
,
7893 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
7894 tem
= gen_rtx (MEM
, Pmode
, tem
);
7899 /* __builtin_setjmp is passed a pointer to an array of five words (not
7900 all will be used on all machines). It operates similarly to the C
7901 library function of the same name, but is more efficient. Much of
7902 the code below (and for longjmp) is copied from the handling of
7905 NOTE: This is intended for use by GNAT and the exception handling
7906 scheme in the compiler and will only work in the method used by
7910 expand_builtin_setjmp (buf_addr
, target
)
7914 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
7915 enum machine_mode sa_mode
= Pmode
, value_mode
;
7917 int old_inhibit_defer_pop
= inhibit_defer_pop
;
7919 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
7920 build_function_type (void_type_node
, NULL_TREE
),
7923 CUMULATIVE_ARGS args_so_far
;
7927 value_mode
= TYPE_MODE (integer_type_node
);
7929 #ifdef POINTERS_EXTEND_UNSIGNED
7930 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
7933 buf_addr
= force_reg (Pmode
, buf_addr
);
7935 if (target
== 0 || GET_CODE (target
) != REG
7936 || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
7937 target
= gen_reg_rtx (value_mode
);
7941 CONST_CALL_P (emit_note (NULL_PTR
, NOTE_INSN_SETJMP
)) = 1;
7942 current_function_calls_setjmp
= 1;
7944 /* We store the frame pointer and the address of lab1 in the buffer
7945 and use the rest of it for the stack save area, which is
7946 machine-dependent. */
7947 emit_move_insn (gen_rtx (MEM
, Pmode
, buf_addr
),
7948 virtual_stack_vars_rtx
);
7950 (validize_mem (gen_rtx (MEM
, Pmode
,
7951 plus_constant (buf_addr
,
7952 GET_MODE_SIZE (Pmode
)))),
7953 gen_rtx (LABEL_REF
, Pmode
, lab1
));
7955 #ifdef HAVE_save_stack_nonlocal
7956 if (HAVE_save_stack_nonlocal
)
7957 sa_mode
= insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0];
7960 stack_save
= gen_rtx (MEM
, sa_mode
,
7961 plus_constant (buf_addr
,
7962 2 * GET_MODE_SIZE (Pmode
)));
7963 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
7967 emit_insn (gen_setjmp ());
7970 /* Set TARGET to zero and branch around the other case. */
7971 emit_move_insn (target
, const0_rtx
);
7972 emit_jump_insn (gen_jump (lab2
));
7976 /* Note that setjmp clobbers FP when we get here, so we have to make
7977 sure it's marked as used by this function. */
7978 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
7980 /* Mark the static chain as clobbered here so life information
7981 doesn't get messed up for it. */
7982 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, static_chain_rtx
));
7984 /* Now put in the code to restore the frame pointer, and argument
7985 pointer, if needed. The code below is from expand_end_bindings
7986 in stmt.c; see detailed documentation there. */
7987 #ifdef HAVE_nonlocal_goto
7988 if (! HAVE_nonlocal_goto
)
7990 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
7992 /* Do we need to do something like:
7994 current_function_has_nonlocal_label = 1;
7996 here? It seems like we might have to, or some subset of that
7997 functionality, but I am unsure. (mrs) */
7999 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8000 if (fixed_regs
[ARG_POINTER_REGNUM
])
8002 #ifdef ELIMINABLE_REGS
8003 static struct elims
{int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
8005 for (i
= 0; i
< sizeof elim_regs
/ sizeof elim_regs
[0]; i
++)
8006 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
8007 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
8010 if (i
== sizeof elim_regs
/ sizeof elim_regs
[0])
8013 /* Now restore our arg pointer from the address at which it
8014 was saved in our stack frame.
8015 If there hasn't be space allocated for it yet, make
8017 if (arg_pointer_save_area
== 0)
8018 arg_pointer_save_area
8019 = assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
8020 emit_move_insn (virtual_incoming_args_rtx
,
8021 copy_to_reg (arg_pointer_save_area
));
8026 #ifdef HAVE_nonlocal_goto_receiver
8027 if (HAVE_nonlocal_goto_receiver
)
8028 emit_insn (gen_nonlocal_goto_receiver ());
8030 /* The static chain pointer contains the address of dummy function.
8031 We need to call it here to handle some PIC cases of restoring a
8032 global pointer. Then return 1. */
8033 op0
= copy_to_mode_reg (Pmode
, static_chain_rtx
);
8035 /* We can't actually call emit_library_call here, so do everything
8036 it does, which isn't much for a libfunc with no args. */
8037 op0
= memory_address (FUNCTION_MODE
, op0
);
8039 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
,
8040 gen_rtx (SYMBOL_REF
, Pmode
, "__dummy"), 1);
8041 next_arg_reg
= FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1);
8043 #ifndef ACCUMULATE_OUTGOING_ARGS
8044 #ifdef HAVE_call_pop
8046 emit_call_insn (gen_call_pop (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8047 const0_rtx
, next_arg_reg
,
8048 GEN_INT (return_pops
)));
8055 emit_call_insn (gen_call (gen_rtx (MEM
, FUNCTION_MODE
, op0
),
8056 const0_rtx
, next_arg_reg
, const0_rtx
));
8061 emit_move_insn (target
, const1_rtx
);
8067 /* Expand an expression EXP that calls a built-in function,
8068 with result going to TARGET if that's convenient
8069 (and in mode MODE if that's convenient).
8070 SUBTARGET may be used as the target for computing one of EXP's operands.
8071 IGNORE is nonzero if the value is to be ignored. */
8073 #define CALLED_AS_BUILT_IN(NODE) \
8074 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8077 expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
8081 enum machine_mode mode
;
8084 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
8085 tree arglist
= TREE_OPERAND (exp
, 1);
8088 enum machine_mode value_mode
= TYPE_MODE (TREE_TYPE (exp
));
8089 optab builtin_optab
;
8091 switch (DECL_FUNCTION_CODE (fndecl
))
8096 /* build_function_call changes these into ABS_EXPR. */
8101 /* Treat these like sqrt, but only if the user asks for them. */
8102 if (! flag_fast_math
)
8104 case BUILT_IN_FSQRT
:
8105 /* If not optimizing, call the library function. */
8110 /* Arg could be wrong type if user redeclared this fcn wrong. */
8111 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != REAL_TYPE
)
8114 /* Stabilize and compute the argument. */
8115 if (TREE_CODE (TREE_VALUE (arglist
)) != VAR_DECL
8116 && TREE_CODE (TREE_VALUE (arglist
)) != PARM_DECL
)
8118 exp
= copy_node (exp
);
8119 arglist
= copy_node (arglist
);
8120 TREE_OPERAND (exp
, 1) = arglist
;
8121 TREE_VALUE (arglist
) = save_expr (TREE_VALUE (arglist
));
8123 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
8125 /* Make a suitable register to place result in. */
8126 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
8131 switch (DECL_FUNCTION_CODE (fndecl
))
8134 builtin_optab
= sin_optab
; break;
8136 builtin_optab
= cos_optab
; break;
8137 case BUILT_IN_FSQRT
:
8138 builtin_optab
= sqrt_optab
; break;
8143 /* Compute into TARGET.
8144 Set TARGET to wherever the result comes back. */
8145 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8146 builtin_optab
, op0
, target
, 0);
8148 /* If we were unable to expand via the builtin, stop the
8149 sequence (without outputting the insns) and break, causing
8150 a call to the library function. */
8157 /* Check the results by default. But if flag_fast_math is turned on,
8158 then assume sqrt will always be called with valid arguments. */
8160 if (! flag_fast_math
)
8162 /* Don't define the builtin FP instructions
8163 if your machine is not IEEE. */
8164 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
8167 lab1
= gen_label_rtx ();
8169 /* Test the result; if it is NaN, set errno=EDOM because
8170 the argument was not in the domain. */
8171 emit_cmp_insn (target
, target
, EQ
, 0, GET_MODE (target
), 0, 0);
8172 emit_jump_insn (gen_beq (lab1
));
8176 #ifdef GEN_ERRNO_RTX
8177 rtx errno_rtx
= GEN_ERRNO_RTX
;
8180 = gen_rtx (MEM
, word_mode
, gen_rtx (SYMBOL_REF
, Pmode
, "errno"));
8183 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
8186 /* We can't set errno=EDOM directly; let the library call do it.
8187 Pop the arguments right away in case the call gets deleted. */
8189 expand_call (exp
, target
, 0);
8196 /* Output the entire sequence. */
8197 insns
= get_insns ();
8203 /* __builtin_apply_args returns block of memory allocated on
8204 the stack into which is stored the arg pointer, structure
8205 value address, static chain, and all the registers that might
8206 possibly be used in performing a function call. The code is
8207 moved to the start of the function so the incoming values are
8209 case BUILT_IN_APPLY_ARGS
:
8210 /* Don't do __builtin_apply_args more than once in a function.
8211 Save the result of the first call and reuse it. */
8212 if (apply_args_value
!= 0)
8213 return apply_args_value
;
8215 /* When this function is called, it means that registers must be
8216 saved on entry to this function. So we migrate the
8217 call to the first insn of this function. */
8222 temp
= expand_builtin_apply_args ();
8226 apply_args_value
= temp
;
8228 /* Put the sequence after the NOTE that starts the function.
8229 If this is inside a SEQUENCE, make the outer-level insn
8230 chain current, so the code is placed at the start of the
8232 push_topmost_sequence ();
8233 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8234 pop_topmost_sequence ();
8238 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8239 FUNCTION with a copy of the parameters described by
8240 ARGUMENTS, and ARGSIZE. It returns a block of memory
8241 allocated on the stack into which is stored all the registers
8242 that might possibly be used for returning the result of a
8243 function. ARGUMENTS is the value returned by
8244 __builtin_apply_args. ARGSIZE is the number of bytes of
8245 arguments that must be copied. ??? How should this value be
8246 computed? We'll also need a safe worst case value for varargs
8248 case BUILT_IN_APPLY
:
8250 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8251 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8252 || TREE_CHAIN (arglist
) == 0
8253 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8254 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8255 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8263 for (t
= arglist
, i
= 0; t
; t
= TREE_CHAIN (t
), i
++)
8264 ops
[i
] = expand_expr (TREE_VALUE (t
), NULL_RTX
, VOIDmode
, 0);
8266 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
8269 /* __builtin_return (RESULT) causes the function to return the
8270 value described by RESULT. RESULT is address of the block of
8271 memory returned by __builtin_apply. */
8272 case BUILT_IN_RETURN
:
8274 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8275 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) == POINTER_TYPE
)
8276 expand_builtin_return (expand_expr (TREE_VALUE (arglist
),
8277 NULL_RTX
, VOIDmode
, 0));
8280 case BUILT_IN_SAVEREGS
:
8281 /* Don't do __builtin_saveregs more than once in a function.
8282 Save the result of the first call and reuse it. */
8283 if (saveregs_value
!= 0)
8284 return saveregs_value
;
8286 /* When this function is called, it means that registers must be
8287 saved on entry to this function. So we migrate the
8288 call to the first insn of this function. */
8292 /* Now really call the function. `expand_call' does not call
8293 expand_builtin, so there is no danger of infinite recursion here. */
8296 #ifdef EXPAND_BUILTIN_SAVEREGS
8297 /* Do whatever the machine needs done in this case. */
8298 temp
= EXPAND_BUILTIN_SAVEREGS (arglist
);
8300 /* The register where the function returns its value
8301 is likely to have something else in it, such as an argument.
8302 So preserve that register around the call. */
8304 if (value_mode
!= VOIDmode
)
8306 rtx valreg
= hard_libcall_value (value_mode
);
8307 rtx saved_valreg
= gen_reg_rtx (value_mode
);
8309 emit_move_insn (saved_valreg
, valreg
);
8310 temp
= expand_call (exp
, target
, ignore
);
8311 emit_move_insn (valreg
, saved_valreg
);
8314 /* Generate the call, putting the value in a pseudo. */
8315 temp
= expand_call (exp
, target
, ignore
);
8321 saveregs_value
= temp
;
8323 /* Put the sequence after the NOTE that starts the function.
8324 If this is inside a SEQUENCE, make the outer-level insn
8325 chain current, so the code is placed at the start of the
8327 push_topmost_sequence ();
8328 emit_insns_before (seq
, NEXT_INSN (get_insns ()));
8329 pop_topmost_sequence ();
8333 /* __builtin_args_info (N) returns word N of the arg space info
8334 for the current function. The number and meanings of words
8335 is controlled by the definition of CUMULATIVE_ARGS. */
8336 case BUILT_IN_ARGS_INFO
:
8338 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
8340 int *word_ptr
= (int *) ¤t_function_args_info
;
8341 tree type
, elts
, result
;
8343 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
8344 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8345 __FILE__
, __LINE__
);
8349 tree arg
= TREE_VALUE (arglist
);
8350 if (TREE_CODE (arg
) != INTEGER_CST
)
8351 error ("argument of `__builtin_args_info' must be constant");
8354 int wordnum
= TREE_INT_CST_LOW (arg
);
8356 if (wordnum
< 0 || wordnum
>= nwords
|| TREE_INT_CST_HIGH (arg
))
8357 error ("argument of `__builtin_args_info' out of range");
8359 return GEN_INT (word_ptr
[wordnum
]);
8363 error ("missing argument in `__builtin_args_info'");
8368 for (i
= 0; i
< nwords
; i
++)
8369 elts
= tree_cons (NULL_TREE
, build_int_2 (word_ptr
[i
], 0));
8371 type
= build_array_type (integer_type_node
,
8372 build_index_type (build_int_2 (nwords
, 0)));
8373 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, nreverse (elts
));
8374 TREE_CONSTANT (result
) = 1;
8375 TREE_STATIC (result
) = 1;
8376 result
= build (INDIRECT_REF
, build_pointer_type (type
), result
);
8377 TREE_CONSTANT (result
) = 1;
8378 return expand_expr (result
, NULL_RTX
, VOIDmode
, 0);
8382 /* Return the address of the first anonymous stack arg. */
8383 case BUILT_IN_NEXT_ARG
:
8385 tree fntype
= TREE_TYPE (current_function_decl
);
8387 if ((TYPE_ARG_TYPES (fntype
) == 0
8388 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
8390 && ! current_function_varargs
)
8392 error ("`va_start' used in function with fixed args");
8398 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
8399 tree arg
= TREE_VALUE (arglist
);
8401 /* Strip off all nops for the sake of the comparison. This
8402 is not quite the same as STRIP_NOPS. It does more.
8403 We must also strip off INDIRECT_EXPR for C++ reference
8405 while (TREE_CODE (arg
) == NOP_EXPR
8406 || TREE_CODE (arg
) == CONVERT_EXPR
8407 || TREE_CODE (arg
) == NON_LVALUE_EXPR
8408 || TREE_CODE (arg
) == INDIRECT_REF
)
8409 arg
= TREE_OPERAND (arg
, 0);
8410 if (arg
!= last_parm
)
8411 warning ("second parameter of `va_start' not last named argument");
8413 else if (! current_function_varargs
)
8414 /* Evidently an out of date version of <stdarg.h>; can't validate
8415 va_start's second argument, but can still work as intended. */
8416 warning ("`__builtin_next_arg' called without an argument");
8419 return expand_binop (Pmode
, add_optab
,
8420 current_function_internal_arg_pointer
,
8421 current_function_arg_offset_rtx
,
8422 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
8424 case BUILT_IN_CLASSIFY_TYPE
:
8427 tree type
= TREE_TYPE (TREE_VALUE (arglist
));
8428 enum tree_code code
= TREE_CODE (type
);
8429 if (code
== VOID_TYPE
)
8430 return GEN_INT (void_type_class
);
8431 if (code
== INTEGER_TYPE
)
8432 return GEN_INT (integer_type_class
);
8433 if (code
== CHAR_TYPE
)
8434 return GEN_INT (char_type_class
);
8435 if (code
== ENUMERAL_TYPE
)
8436 return GEN_INT (enumeral_type_class
);
8437 if (code
== BOOLEAN_TYPE
)
8438 return GEN_INT (boolean_type_class
);
8439 if (code
== POINTER_TYPE
)
8440 return GEN_INT (pointer_type_class
);
8441 if (code
== REFERENCE_TYPE
)
8442 return GEN_INT (reference_type_class
);
8443 if (code
== OFFSET_TYPE
)
8444 return GEN_INT (offset_type_class
);
8445 if (code
== REAL_TYPE
)
8446 return GEN_INT (real_type_class
);
8447 if (code
== COMPLEX_TYPE
)
8448 return GEN_INT (complex_type_class
);
8449 if (code
== FUNCTION_TYPE
)
8450 return GEN_INT (function_type_class
);
8451 if (code
== METHOD_TYPE
)
8452 return GEN_INT (method_type_class
);
8453 if (code
== RECORD_TYPE
)
8454 return GEN_INT (record_type_class
);
8455 if (code
== UNION_TYPE
|| code
== QUAL_UNION_TYPE
)
8456 return GEN_INT (union_type_class
);
8457 if (code
== ARRAY_TYPE
)
8459 if (TYPE_STRING_FLAG (type
))
8460 return GEN_INT (string_type_class
);
8462 return GEN_INT (array_type_class
);
8464 if (code
== SET_TYPE
)
8465 return GEN_INT (set_type_class
);
8466 if (code
== FILE_TYPE
)
8467 return GEN_INT (file_type_class
);
8468 if (code
== LANG_TYPE
)
8469 return GEN_INT (lang_type_class
);
8471 return GEN_INT (no_type_class
);
8473 case BUILT_IN_CONSTANT_P
:
8478 tree arg
= TREE_VALUE (arglist
);
8481 return (TREE_CODE_CLASS (TREE_CODE (arg
)) == 'c'
8482 || (TREE_CODE (arg
) == ADDR_EXPR
8483 && TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
8484 ? const1_rtx
: const0_rtx
);
8487 case BUILT_IN_FRAME_ADDRESS
:
8488 /* The argument must be a nonnegative integer constant.
8489 It counts the number of frames to scan up the stack.
8490 The value is the address of that frame. */
8491 case BUILT_IN_RETURN_ADDRESS
:
8492 /* The argument must be a nonnegative integer constant.
8493 It counts the number of frames to scan up the stack.
8494 The value is the return address saved in that frame. */
8496 /* Warning about missing arg was already issued. */
8498 else if (TREE_CODE (TREE_VALUE (arglist
)) != INTEGER_CST
8499 || tree_int_cst_sgn (TREE_VALUE (arglist
)) < 0)
8501 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
8502 error ("invalid arg to `__builtin_frame_address'");
8504 error ("invalid arg to `__builtin_return_address'");
8509 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
8510 TREE_INT_CST_LOW (TREE_VALUE (arglist
)),
8511 hard_frame_pointer_rtx
);
8513 /* For __builtin_frame_address, return what we've got. */
8514 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
8517 if (GET_CODE (tem
) != REG
)
8518 tem
= copy_to_reg (tem
);
8522 case BUILT_IN_ALLOCA
:
8524 /* Arg could be non-integer if user redeclared this fcn wrong. */
8525 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8528 /* Compute the argument. */
8529 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
8531 /* Allocate the desired space. */
8532 return allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
8535 /* If not optimizing, call the library function. */
8536 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8540 /* Arg could be non-integer if user redeclared this fcn wrong. */
8541 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
)
8544 /* Compute the argument. */
8545 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
8546 /* Compute ffs, into TARGET if possible.
8547 Set TARGET to wherever the result comes back. */
8548 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
8549 ffs_optab
, op0
, target
, 1);
8554 case BUILT_IN_STRLEN
:
8555 /* If not optimizing, call the library function. */
8556 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8560 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8561 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8565 tree src
= TREE_VALUE (arglist
);
8566 tree len
= c_strlen (src
);
8569 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8571 rtx result
, src_rtx
, char_rtx
;
8572 enum machine_mode insn_mode
= value_mode
, char_mode
;
8573 enum insn_code icode
;
8575 /* If the length is known, just return it. */
8577 return expand_expr (len
, target
, mode
, 0);
8579 /* If SRC is not a pointer type, don't do this operation inline. */
8583 /* Call a function if we can't compute strlen in the right mode. */
8585 while (insn_mode
!= VOIDmode
)
8587 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
8588 if (icode
!= CODE_FOR_nothing
)
8591 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
8593 if (insn_mode
== VOIDmode
)
8596 /* Make a place to write the result of the instruction. */
8599 && GET_CODE (result
) == REG
8600 && GET_MODE (result
) == insn_mode
8601 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8602 result
= gen_reg_rtx (insn_mode
);
8604 /* Make sure the operands are acceptable to the predicates. */
8606 if (! (*insn_operand_predicate
[(int)icode
][0]) (result
, insn_mode
))
8607 result
= gen_reg_rtx (insn_mode
);
8609 src_rtx
= memory_address (BLKmode
,
8610 expand_expr (src
, NULL_RTX
, ptr_mode
,
8612 if (! (*insn_operand_predicate
[(int)icode
][1]) (src_rtx
, Pmode
))
8613 src_rtx
= copy_to_mode_reg (Pmode
, src_rtx
);
8615 char_rtx
= const0_rtx
;
8616 char_mode
= insn_operand_mode
[(int)icode
][2];
8617 if (! (*insn_operand_predicate
[(int)icode
][2]) (char_rtx
, char_mode
))
8618 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
8620 emit_insn (GEN_FCN (icode
) (result
,
8621 gen_rtx (MEM
, BLKmode
, src_rtx
),
8622 char_rtx
, GEN_INT (align
)));
8624 /* Return the value in the proper mode for this function. */
8625 if (GET_MODE (result
) == value_mode
)
8627 else if (target
!= 0)
8629 convert_move (target
, result
, 0);
8633 return convert_to_mode (value_mode
, result
, 0);
8636 case BUILT_IN_STRCPY
:
8637 /* If not optimizing, call the library function. */
8638 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8642 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8643 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8644 || TREE_CHAIN (arglist
) == 0
8645 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8649 tree len
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
8654 len
= size_binop (PLUS_EXPR
, len
, integer_one_node
);
8656 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8660 case BUILT_IN_MEMCPY
:
8661 /* If not optimizing, call the library function. */
8662 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8666 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8667 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8668 || TREE_CHAIN (arglist
) == 0
8669 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8670 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8671 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8675 tree dest
= TREE_VALUE (arglist
);
8676 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
8677 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8681 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8683 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8684 rtx dest_rtx
, dest_mem
, src_mem
;
8686 /* If either SRC or DEST is not a pointer type, don't do
8687 this operation in-line. */
8688 if (src_align
== 0 || dest_align
== 0)
8690 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCPY
)
8691 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8695 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8696 dest_mem
= gen_rtx (MEM
, BLKmode
,
8697 memory_address (BLKmode
, dest_rtx
));
8698 /* There could be a void* cast on top of the object. */
8699 while (TREE_CODE (dest
) == NOP_EXPR
)
8700 dest
= TREE_OPERAND (dest
, 0);
8701 type
= TREE_TYPE (TREE_TYPE (dest
));
8702 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8703 src_mem
= gen_rtx (MEM
, BLKmode
,
8704 memory_address (BLKmode
,
8705 expand_expr (src
, NULL_RTX
,
8708 /* There could be a void* cast on top of the object. */
8709 while (TREE_CODE (src
) == NOP_EXPR
)
8710 src
= TREE_OPERAND (src
, 0);
8711 type
= TREE_TYPE (TREE_TYPE (src
));
8712 MEM_IN_STRUCT_P (src_mem
) = AGGREGATE_TYPE_P (type
);
8714 /* Copy word part most expediently. */
8715 emit_block_move (dest_mem
, src_mem
,
8716 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8717 MIN (src_align
, dest_align
));
8718 return force_operand (dest_rtx
, NULL_RTX
);
8721 case BUILT_IN_MEMSET
:
8722 /* If not optimizing, call the library function. */
8723 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8727 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8728 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8729 || TREE_CHAIN (arglist
) == 0
8730 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
))))
8732 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8734 != (TREE_CODE (TREE_TYPE
8736 (TREE_CHAIN (TREE_CHAIN (arglist
))))))))
8740 tree dest
= TREE_VALUE (arglist
);
8741 tree val
= TREE_VALUE (TREE_CHAIN (arglist
));
8742 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8746 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8747 rtx dest_rtx
, dest_mem
;
8749 /* If DEST is not a pointer type, don't do this
8750 operation in-line. */
8751 if (dest_align
== 0)
8754 /* If VAL is not 0, don't do this operation in-line. */
8755 if (expand_expr (val
, NULL_RTX
, VOIDmode
, 0) != const0_rtx
)
8758 dest_rtx
= expand_expr (dest
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
8759 dest_mem
= gen_rtx (MEM
, BLKmode
,
8760 memory_address (BLKmode
, dest_rtx
));
8761 /* There could be a void* cast on top of the object. */
8762 while (TREE_CODE (dest
) == NOP_EXPR
)
8763 dest
= TREE_OPERAND (dest
, 0);
8764 type
= TREE_TYPE (TREE_TYPE (dest
));
8765 MEM_IN_STRUCT_P (dest_mem
) = AGGREGATE_TYPE_P (type
);
8767 clear_storage (dest_mem
, expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8770 return force_operand (dest_rtx
, NULL_RTX
);
8773 /* These comparison functions need an instruction that returns an actual
8774 index. An ordinary compare that just sets the condition codes
8776 #ifdef HAVE_cmpstrsi
8777 case BUILT_IN_STRCMP
:
8778 /* If not optimizing, call the library function. */
8779 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8783 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8784 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8785 || TREE_CHAIN (arglist
) == 0
8786 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
)
8788 else if (!HAVE_cmpstrsi
)
8791 tree arg1
= TREE_VALUE (arglist
);
8792 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8796 len
= c_strlen (arg1
);
8798 len
= size_binop (PLUS_EXPR
, integer_one_node
, len
);
8799 len2
= c_strlen (arg2
);
8801 len2
= size_binop (PLUS_EXPR
, integer_one_node
, len2
);
8803 /* If we don't have a constant length for the first, use the length
8804 of the second, if we know it. We don't require a constant for
8805 this case; some cost analysis could be done if both are available
8806 but neither is constant. For now, assume they're equally cheap.
8808 If both strings have constant lengths, use the smaller. This
8809 could arise if optimization results in strcpy being called with
8810 two fixed strings, or if the code was machine-generated. We should
8811 add some code to the `memcmp' handler below to deal with such
8812 situations, someday. */
8813 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
8820 else if (len2
&& TREE_CODE (len2
) == INTEGER_CST
)
8822 if (tree_int_cst_lt (len2
, len
))
8826 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
8830 case BUILT_IN_MEMCMP
:
8831 /* If not optimizing, call the library function. */
8832 if (!optimize
&& ! CALLED_AS_BUILT_IN (fndecl
))
8836 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8837 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
8838 || TREE_CHAIN (arglist
) == 0
8839 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist
)))) != POINTER_TYPE
8840 || TREE_CHAIN (TREE_CHAIN (arglist
)) == 0
8841 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
))))) != INTEGER_TYPE
)
8843 else if (!HAVE_cmpstrsi
)
8846 tree arg1
= TREE_VALUE (arglist
);
8847 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
8848 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
8852 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8854 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
8855 enum machine_mode insn_mode
8856 = insn_operand_mode
[(int) CODE_FOR_cmpstrsi
][0];
8858 /* If we don't have POINTER_TYPE, call the function. */
8859 if (arg1_align
== 0 || arg2_align
== 0)
8861 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRCMP
)
8862 TREE_CHAIN (TREE_CHAIN (arglist
)) = 0;
8866 /* Make a place to write the result of the instruction. */
8869 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
8870 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
8871 result
= gen_reg_rtx (insn_mode
);
8873 emit_insn (gen_cmpstrsi (result
,
8874 gen_rtx (MEM
, BLKmode
,
8875 expand_expr (arg1
, NULL_RTX
,
8878 gen_rtx (MEM
, BLKmode
,
8879 expand_expr (arg2
, NULL_RTX
,
8882 expand_expr (len
, NULL_RTX
, VOIDmode
, 0),
8883 GEN_INT (MIN (arg1_align
, arg2_align
))));
8885 /* Return the value in the proper mode for this function. */
8886 mode
= TYPE_MODE (TREE_TYPE (exp
));
8887 if (GET_MODE (result
) == mode
)
8889 else if (target
!= 0)
8891 convert_move (target
, result
, 0);
8895 return convert_to_mode (mode
, result
, 0);
8898 case BUILT_IN_STRCMP
:
8899 case BUILT_IN_MEMCMP
:
8903 case BUILT_IN_SETJMP
:
8905 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8909 rtx buf_addr
= expand_expr (TREE_VALUE (arglist
), subtarget
,
8911 return expand_builtin_setjmp (buf_addr
, target
);
8914 /* __builtin_longjmp is passed a pointer to an array of five words
8915 and a value, which is a dummy. It's similar to the C library longjmp
8916 function but works with __builtin_setjmp above. */
8917 case BUILT_IN_LONGJMP
:
8918 if (arglist
== 0 || TREE_CHAIN (arglist
) == 0
8919 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != POINTER_TYPE
)
8923 tree dummy_id
= get_identifier ("__dummy");
8924 tree dummy_type
= build_function_type (void_type_node
, NULL_TREE
);
8925 tree dummy_decl
= build_decl (FUNCTION_DECL
, dummy_id
, dummy_type
);
8926 #ifdef POINTERS_EXTEND_UNSIGNED
8929 convert_memory_address
8931 expand_expr (TREE_VALUE (arglist
),
8932 NULL_RTX
, VOIDmode
, 0)));
8935 = force_reg (Pmode
, expand_expr (TREE_VALUE (arglist
),
8939 rtx fp
= gen_rtx (MEM
, Pmode
, buf_addr
);
8940 rtx lab
= gen_rtx (MEM
, Pmode
,
8941 plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
)));
8942 enum machine_mode sa_mode
8943 #ifdef HAVE_save_stack_nonlocal
8944 = (HAVE_save_stack_nonlocal
8945 ? insn_operand_mode
[(int) CODE_FOR_save_stack_nonlocal
][0]
8950 rtx stack
= gen_rtx (MEM
, sa_mode
,
8951 plus_constant (buf_addr
,
8952 2 * GET_MODE_SIZE (Pmode
)));
8954 DECL_EXTERNAL (dummy_decl
) = 1;
8955 TREE_PUBLIC (dummy_decl
) = 1;
8956 make_decl_rtl (dummy_decl
, NULL_PTR
, 1);
8958 /* Expand the second expression just for side-effects. */
8959 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)),
8960 const0_rtx
, VOIDmode
, 0);
8962 assemble_external (dummy_decl
);
8964 /* Pick up FP, label, and SP from the block and jump. This code is
8965 from expand_goto in stmt.c; see there for detailed comments. */
8966 #if HAVE_nonlocal_goto
8967 if (HAVE_nonlocal_goto
)
8968 emit_insn (gen_nonlocal_goto (fp
, lab
, stack
,
8969 XEXP (DECL_RTL (dummy_decl
), 0)));
8973 lab
= copy_to_reg (lab
);
8974 emit_move_insn (hard_frame_pointer_rtx
, fp
);
8975 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
8977 /* Put in the static chain register the address of the dummy
8979 emit_move_insn (static_chain_rtx
, XEXP (DECL_RTL (dummy_decl
), 0));
8980 emit_insn (gen_rtx (USE
, VOIDmode
, hard_frame_pointer_rtx
));
8981 emit_insn (gen_rtx (USE
, VOIDmode
, stack_pointer_rtx
));
8982 emit_insn (gen_rtx (USE
, VOIDmode
, static_chain_rtx
));
8983 emit_indirect_jump (lab
);
8989 default: /* just do library call, if unknown builtin */
8990 error ("built-in function `%s' not currently supported",
8991 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
8994 /* The switch statement above can drop through to cause the function
8995 to be called normally. */
8997 return expand_call (exp
, target
, ignore
);
9000 /* Built-in functions to perform an untyped call and return. */
9002 /* For each register that may be used for calling a function, this
9003 gives a mode used to copy the register's value. VOIDmode indicates
9004 the register is not used for calling a function. If the machine
9005 has register windows, this gives only the outbound registers.
9006 INCOMING_REGNO gives the corresponding inbound register. */
9007 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
9009 /* For each register that may be used for returning values, this gives
9010 a mode used to copy the register's value. VOIDmode indicates the
9011 register is not used for returning values. If the machine has
9012 register windows, this gives only the outbound registers.
9013 INCOMING_REGNO gives the corresponding inbound register. */
9014 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
9016 /* For each register that may be used for calling a function, this
9017 gives the offset of that register into the block returned by
9018 __builtin_apply_args. 0 indicates that the register is not
9019 used for calling a function. */
9020 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
9022 /* Return the offset of register REGNO into the block returned by
9023 __builtin_apply_args. This is not declared static, since it is
9024 needed in objc-act.c. */
9027 apply_args_register_offset (regno
)
9032 /* Arguments are always put in outgoing registers (in the argument
9033 block) if such make sense. */
9034 #ifdef OUTGOING_REGNO
9035 regno
= OUTGOING_REGNO(regno
);
9037 return apply_args_reg_offset
[regno
];
9040 /* Return the size required for the block returned by __builtin_apply_args,
9041 and initialize apply_args_mode. */
9046 static int size
= -1;
9048 enum machine_mode mode
;
9050 /* The values computed by this function never change. */
9053 /* The first value is the incoming arg-pointer. */
9054 size
= GET_MODE_SIZE (Pmode
);
9056 /* The second value is the structure value address unless this is
9057 passed as an "invisible" first argument. */
9058 if (struct_value_rtx
)
9059 size
+= GET_MODE_SIZE (Pmode
);
9061 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9062 if (FUNCTION_ARG_REGNO_P (regno
))
9064 /* Search for the proper mode for copying this register's
9065 value. I'm not sure this is right, but it works so far. */
9066 enum machine_mode best_mode
= VOIDmode
;
9068 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9070 mode
= GET_MODE_WIDER_MODE (mode
))
9071 if (HARD_REGNO_MODE_OK (regno
, mode
)
9072 && HARD_REGNO_NREGS (regno
, mode
) == 1)
9075 if (best_mode
== VOIDmode
)
9076 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9078 mode
= GET_MODE_WIDER_MODE (mode
))
9079 if (HARD_REGNO_MODE_OK (regno
, mode
)
9080 && (mov_optab
->handlers
[(int) mode
].insn_code
9081 != CODE_FOR_nothing
))
9085 if (mode
== VOIDmode
)
9088 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9089 if (size
% align
!= 0)
9090 size
= CEIL (size
, align
) * align
;
9091 apply_args_reg_offset
[regno
] = size
;
9092 size
+= GET_MODE_SIZE (mode
);
9093 apply_args_mode
[regno
] = mode
;
9097 apply_args_mode
[regno
] = VOIDmode
;
9098 apply_args_reg_offset
[regno
] = 0;
9104 /* Return the size required for the block returned by __builtin_apply,
9105 and initialize apply_result_mode. */
9108 apply_result_size ()
9110 static int size
= -1;
9112 enum machine_mode mode
;
9114 /* The values computed by this function never change. */
9119 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9120 if (FUNCTION_VALUE_REGNO_P (regno
))
9122 /* Search for the proper mode for copying this register's
9123 value. I'm not sure this is right, but it works so far. */
9124 enum machine_mode best_mode
= VOIDmode
;
9126 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
9128 mode
= GET_MODE_WIDER_MODE (mode
))
9129 if (HARD_REGNO_MODE_OK (regno
, mode
))
9132 if (best_mode
== VOIDmode
)
9133 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
9135 mode
= GET_MODE_WIDER_MODE (mode
))
9136 if (HARD_REGNO_MODE_OK (regno
, mode
)
9137 && (mov_optab
->handlers
[(int) mode
].insn_code
9138 != CODE_FOR_nothing
))
9142 if (mode
== VOIDmode
)
9145 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9146 if (size
% align
!= 0)
9147 size
= CEIL (size
, align
) * align
;
9148 size
+= GET_MODE_SIZE (mode
);
9149 apply_result_mode
[regno
] = mode
;
9152 apply_result_mode
[regno
] = VOIDmode
;
9154 /* Allow targets that use untyped_call and untyped_return to override
9155 the size so that machine-specific information can be stored here. */
9156 #ifdef APPLY_RESULT_SIZE
9157 size
= APPLY_RESULT_SIZE
;
9163 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9164 /* Create a vector describing the result block RESULT. If SAVEP is true,
9165 the result block is used to save the values; otherwise it is used to
9166 restore the values. */
/* NOTE(review): mangled fragment — the return type, parameter declarations
   (`int savep; rtx result;`), braces and the `size = nelts = 0;`
   initialization appear elided.  Text kept byte-identical below.  */
9169 result_vector (savep
, result
)
9173 int regno
, size
, align
, nelts
;
9174 enum machine_mode mode
;
/* Scratch array of SETs, one per return register; turned into a
   PARALLEL at the end for the untyped_call/untyped_return patterns.  */
9176 rtx
*savevec
= (rtx
*) alloca (FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
9179 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9180 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
/* Mirror the offset layout computed by apply_result_size: align,
   then place this register's slot at the running offset SIZE.  */
9182 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9183 if (size
% align
!= 0)
9184 size
= CEIL (size
, align
) * align
;
/* When saving we name the callee-visible regno; when restoring we
   use INCOMING_REGNO to map to the caller's view of the register.  */
9185 reg
= gen_rtx (REG
, mode
, savep
? regno
: INCOMING_REGNO (regno
));
9186 mem
= change_address (result
, mode
,
9187 plus_constant (XEXP (result
, 0), size
));
/* Direction of the SET depends on save vs. restore.  */
9188 savevec
[nelts
++] = (savep
9189 ? gen_rtx (SET
, VOIDmode
, mem
, reg
)
9190 : gen_rtx (SET
, VOIDmode
, reg
, mem
));
9191 size
+= GET_MODE_SIZE (mode
);
9193 return gen_rtx (PARALLEL
, VOIDmode
, gen_rtvec_v (nelts
, savevec
));
9195 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9197 /* Save the state required to perform an untyped call with the same
9198 arguments as were passed to the current function. */
/* NOTE(review): mangled fragment — the return type, the `rtx registers, tem;`
   declaration, braces, and the explicit offset arguments to plus_constant
   appear elided.  Text kept byte-identical below.  */
9201 expand_builtin_apply_args ()
9204 int size
, align
, regno
;
9205 enum machine_mode mode
;
9207 /* Create a block where the arg-pointer, structure value address,
9208 and argument registers can be saved. */
9209 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
9211 /* Walk past the arg-pointer and structure value address. */
9212 size
= GET_MODE_SIZE (Pmode
);
9213 if (struct_value_rtx
)
9214 size
+= GET_MODE_SIZE (Pmode
);
9216 /* Save each register used in calling a function to the block. */
9217 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9218 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
/* Align the running offset exactly as apply_args_size did, so the
   slots here line up with apply_args_reg_offset.  */
9222 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9223 if (size
% align
!= 0)
9224 size
= CEIL (size
, align
) * align
;
/* Read the incoming (caller-side) register for this argument.  */
9226 tem
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9229 /* For reg-stack.c's stack register household.
9230 Compare with a similar piece of code in function.c. */
9232 emit_insn (gen_rtx (USE
, mode
, tem
));
/* Store the register into its slot in the save block.  (The SIZE
   offset argument line appears elided from this fragment.)  */
9235 emit_move_insn (change_address (registers
, mode
,
9236 plus_constant (XEXP (registers
, 0),
9239 size
+= GET_MODE_SIZE (mode
);
9242 /* Save the arg pointer to the block. */
9243 emit_move_insn (change_address (registers
, Pmode
, XEXP (registers
, 0)),
9244 copy_to_reg (virtual_incoming_args_rtx
));
9245 size
= GET_MODE_SIZE (Pmode
);
9247 /* Save the structure value address unless this is passed as an
9248 "invisible" first argument. */
9249 if (struct_value_incoming_rtx
)
9251 emit_move_insn (change_address (registers
, Pmode
,
9252 plus_constant (XEXP (registers
, 0),
9254 copy_to_reg (struct_value_incoming_rtx
));
9255 size
+= GET_MODE_SIZE (Pmode
);
9258 /* Return the address of the block. */
9259 return copy_addr_to_reg (XEXP (registers
, 0));
9262 /* Perform an untyped call and save the state required to perform an
9263 untyped return of whatever value was returned by the given function. */
/* NOTE(review): mangled fragment — braces, several declarations (`valreg`,
   `link`), case/else structure and some argument lines are elided; text is
   kept byte-identical, comments only added.  */
9266 expand_builtin_apply (function
, arguments
, argsize
)
9267 rtx function
, arguments
, argsize
;
9269 int size
, align
, regno
;
9270 enum machine_mode mode
;
9271 rtx incoming_args
, result
, reg
, dest
, call_insn
;
9272 rtx old_stack_level
= 0;
9273 rtx call_fusage
= 0;
9275 /* Create a block where the return registers can be saved. */
9276 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
9278 /* ??? The argsize value should be adjusted here. */
9280 /* Fetch the arg pointer from the ARGUMENTS block. */
9281 incoming_args
= gen_reg_rtx (Pmode
);
9282 emit_move_insn (incoming_args
,
9283 gen_rtx (MEM
, Pmode
, arguments
));
9284 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved arg pointer marks the low end,
   so step back over the argument block.  */
9285 incoming_args
= expand_binop (Pmode
, sub_optab
, incoming_args
, argsize
,
9286 incoming_args
, 0, OPTAB_LIB_WIDEN
);
9289 /* Perform postincrements before actually calling the function. */
9292 /* Push a new argument block and copy the arguments. */
9293 do_pending_stack_adjust ();
9294 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
9296 /* Push a block of memory onto the stack to store the memory arguments.
9297 Save the address in a register, and copy the memory arguments. ??? I
9298 haven't figured out how the calling convention macros effect this,
9299 but it's likely that the source and/or destination addresses in
9300 the block copy will need updating in machine specific ways. */
9301 dest
= copy_addr_to_reg (push_block (argsize
, 0, 0));
9302 emit_block_move (gen_rtx (MEM
, BLKmode
, dest
),
9303 gen_rtx (MEM
, BLKmode
, incoming_args
),
9305 PARM_BOUNDARY
/ BITS_PER_UNIT
);
9307 /* Refer to the argument block. */
9309 arguments
= gen_rtx (MEM
, BLKmode
, arguments
);
9311 /* Walk past the arg-pointer and structure value address. */
9312 size
= GET_MODE_SIZE (Pmode
);
9313 if (struct_value_rtx
)
9314 size
+= GET_MODE_SIZE (Pmode
);
9316 /* Restore each of the registers previously saved. Make USE insns
9317 for each of these registers for use in making the call. */
9318 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9319 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
/* Same alignment discipline as when the block was written, so the
   reload offsets match the save offsets.  */
9321 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9322 if (size
% align
!= 0)
9323 size
= CEIL (size
, align
) * align
;
9324 reg
= gen_rtx (REG
, mode
, regno
);
9325 emit_move_insn (reg
,
9326 change_address (arguments
, mode
,
9327 plus_constant (XEXP (arguments
, 0),
/* Record the reload in CALL_FUSAGE so the call is known to use it.  */
9330 use_reg (&call_fusage
, reg
);
9331 size
+= GET_MODE_SIZE (mode
);
9334 /* Restore the structure value address unless this is passed as an
9335 "invisible" first argument. */
9336 size
= GET_MODE_SIZE (Pmode
);
9337 if (struct_value_rtx
)
9339 rtx value
= gen_reg_rtx (Pmode
);
9340 emit_move_insn (value
,
9341 change_address (arguments
, Pmode
,
9342 plus_constant (XEXP (arguments
, 0),
9344 emit_move_insn (struct_value_rtx
, value
);
9345 if (GET_CODE (struct_value_rtx
) == REG
)
9346 use_reg (&call_fusage
, struct_value_rtx
);
9347 size
+= GET_MODE_SIZE (Pmode
);
9350 /* All arguments and registers used for the call are set up by now! */
9351 function
= prepare_call_address (function
, NULL_TREE
, &call_fusage
, 0);
9353 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9354 and we don't want to load it into a register as an optimization,
9355 because prepare_call_address already did it if it should be done. */
9356 if (GET_CODE (function
) != SYMBOL_REF
)
9357 function
= memory_address (FUNCTION_MODE
, function
);
9359 /* Generate the actual call instruction and save the return value. */
9360 #ifdef HAVE_untyped_call
/* Preferred path: one pattern that saves every return register.  */
9361 if (HAVE_untyped_call
)
9362 emit_call_insn (gen_untyped_call (gen_rtx (MEM
, FUNCTION_MODE
, function
),
9363 result
, result_vector (1, result
)));
9366 #ifdef HAVE_call_value
/* Fallback: call_value can only express a single return register.  */
9367 if (HAVE_call_value
)
9371 /* Locate the unique return register. It is not possible to
9372 express a call that sets more than one return register using
9373 call_value; use untyped_call for that. In fact, untyped_call
9374 only needs to save the return registers in the given block. */
9375 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9376 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
9379 abort (); /* HAVE_untyped_call required. */
9380 valreg
= gen_rtx (REG
, mode
, regno
);
9383 emit_call_insn (gen_call_value (valreg
,
9384 gen_rtx (MEM
, FUNCTION_MODE
, function
),
9385 const0_rtx
, NULL_RTX
, const0_rtx
));
9387 emit_move_insn (change_address (result
, GET_MODE (valreg
),
9395 /* Find the CALL insn we just emitted. */
9396 for (call_insn
= get_last_insn ();
9397 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
9398 call_insn
= PREV_INSN (call_insn
))
9404 /* Put the register usage information on the CALL. If there is already
9405 some usage information, put ours at the end. */
9406 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
/* Walk to the tail of the existing EXPR_LIST and append ours.  */
9410 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
9411 link
= XEXP (link
, 1))
9414 XEXP (link
, 1) = call_fusage
;
9417 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
9419 /* Restore the stack. */
9420 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
9422 /* Return the address of the result block. */
9423 return copy_addr_to_reg (XEXP (result
, 0));
9426 /* Perform an untyped return. */
/* NOTE(review): mangled fragment — the parameter declaration (`rtx result;`),
   braces, the `reg` declaration and the restore offsets are elided; text
   kept byte-identical below.  */
9429 expand_builtin_return (result
)
9432 int size
, align
, regno
;
9433 enum machine_mode mode
;
9435 rtx call_fusage
= 0;
/* Make sure apply_result_mode[] has been initialized.  */
9437 apply_result_size ();
9438 result
= gen_rtx (MEM
, BLKmode
, result
);
9440 #ifdef HAVE_untyped_return
/* Preferred path: one jump pattern that restores all return registers.  */
9441 if (HAVE_untyped_return
)
9443 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
9449 /* Restore the return value and note that each value is used. */
9451 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9452 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
/* Same offset/alignment layout used when the block was saved.  */
9454 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
9455 if (size
% align
!= 0)
9456 size
= CEIL (size
, align
) * align
;
9457 reg
= gen_rtx (REG
, mode
, INCOMING_REGNO (regno
));
9458 emit_move_insn (reg
,
9459 change_address (result
, mode
,
9460 plus_constant (XEXP (result
, 0),
/* Accumulate a USE for each restored register in a side sequence.  */
9463 push_to_sequence (call_fusage
);
9464 emit_insn (gen_rtx (USE
, VOIDmode
, reg
));
9465 call_fusage
= get_insns ();
9467 size
+= GET_MODE_SIZE (mode
);
9470 /* Put the USE insns before the return. */
9471 emit_insns (call_fusage
);
9473 /* Return whatever values was restored by jumping directly to the end
9475 expand_null_return ();
9478 /* Expand code for a post- or pre- increment or decrement
9479 and return the RTX for the result.
9480 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): mangled fragment — the parameter list declarations,
   `bad_subreg`, `icode`, `result`, many braces and several return statements
   are elided from view; text kept byte-identical, comments only added.  */
9483 expand_increment (exp
, post
, ignore
)
9487 register rtx op0
, op1
;
9488 register rtx temp
, value
;
9489 register tree incremented
= TREE_OPERAND (exp
, 0);
9490 optab this_optab
= add_optab
;
9492 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9493 int op0_is_copy
= 0;
9494 int single_insn
= 0;
9495 /* 1 means we can't store into OP0 directly,
9496 because it is a subreg narrower than a word,
9497 and we don't dare clobber the rest of the word. */
/* Bytecode back end handles the whole expression itself.  */
9500 if (output_bytecode
)
9502 bc_expand_expr (exp
);
9506 /* Stabilize any component ref that might need to be
9507 evaluated more than once below. */
9509 || TREE_CODE (incremented
) == BIT_FIELD_REF
9510 || (TREE_CODE (incremented
) == COMPONENT_REF
9511 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9512 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9513 incremented
= stabilize_reference (incremented
);
9514 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9515 ones into save exprs so that they don't accidentally get evaluated
9516 more than once by the code below. */
9517 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9518 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9519 incremented
= save_expr (incremented
);
9521 /* Compute the operands as RTX.
9522 Note whether OP0 is the actual lvalue or a copy of it:
9523 I believe it is a copy iff it is a register or subreg
9524 and insns were generated in computing it. */
9526 temp
= get_last_insn ();
9527 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, 0);
9529 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9530 in place but instead must do sign- or zero-extension during assignment,
9531 so we copy it into a new register and let the code below use it as
9534 Note that we can safely modify this SUBREG since it is know not to be
9535 shared (it was made by the expand_expr call above). */
9537 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9540 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9544 else if (GET_CODE (op0
) == SUBREG
9545 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9547 /* We cannot increment this SUBREG in place. If we are
9548 post-incrementing, get a copy of the old value. Otherwise,
9549 just mark that we cannot increment in place. */
9551 op0
= copy_to_reg (op0
);
/* OP0 is a copy iff new insns were emitted while computing it.  */
9556 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9557 && temp
!= get_last_insn ());
9558 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
9560 /* Decide whether incrementing or decrementing. */
9561 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9562 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9563 this_optab
= sub_optab
;
9565 /* Convert decrement by a constant into a negative increment. */
9566 if (this_optab
== sub_optab
9567 && GET_CODE (op1
) == CONST_INT
)
9569 op1
= GEN_INT (- INTVAL (op1
));
9570 this_optab
= add_optab
;
9573 /* For a preincrement, see if we can do this with a single instruction. */
/* Probe the optab: a usable add/sub pattern whose operand predicates
   all accept OP0/OP1 lets the increment be queued as one insn.  */
9576 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9577 if (icode
!= (int) CODE_FOR_nothing
9578 /* Make sure that OP0 is valid for operands 0 and 1
9579 of the insn we want to queue. */
9580 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9581 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
)
9582 && (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9586 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9587 then we cannot just increment OP0. We must therefore contrive to
9588 increment the original value. Then, for postincrement, we can return
9589 OP0 since it is a copy of the old value. For preincrement, expand here
9590 unless we can do it with a single insn.
9592 Likewise if storing directly into OP0 would clobber high bits
9593 we need to preserve (bad_subreg). */
9594 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9596 /* This is the easiest way to increment the value wherever it is.
9597 Problems with multiple evaluation of INCREMENTED are prevented
9598 because either (1) it is a component_ref or preincrement,
9599 in which case it was stabilized above, or (2) it is an array_ref
9600 with constant index in an array in a register, which is
9601 safe to reevaluate. */
9602 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9603 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9604 ? MINUS_EXPR
: PLUS_EXPR
),
9607 TREE_OPERAND (exp
, 1));
/* Strip conversions so the assignment targets the real object;
   re-apply them to the computed value.  */
9609 while (TREE_CODE (incremented
) == NOP_EXPR
9610 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9612 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9613 incremented
= TREE_OPERAND (incremented
, 0);
9616 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9617 return post
? op0
: temp
;
9622 /* We have a true reference to the value in OP0.
9623 If there is an insn to add or subtract in this mode, queue it.
9624 Queueing the increment insn avoids the register shuffling
9625 that often results if we must increment now and first save
9626 the old value for subsequent use. */
9628 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9629 op0
= stabilize (op0
);
9632 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9633 if (icode
!= (int) CODE_FOR_nothing
9634 /* Make sure that OP0 is valid for operands 0 and 1
9635 of the insn we want to queue. */
9636 && (*insn_operand_predicate
[icode
][0]) (op0
, mode
)
9637 && (*insn_operand_predicate
[icode
][1]) (op0
, mode
))
9639 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9640 op1
= force_reg (mode
, op1
);
9642 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9644 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
/* Memory operand not accepted directly: address it through a
   register, increment a temp copy, then store it back.  */
9646 rtx addr
= force_reg (Pmode
, XEXP (op0
, 0));
9649 op0
= change_address (op0
, VOIDmode
, addr
);
9650 temp
= force_reg (GET_MODE (op0
), op0
);
9651 if (! (*insn_operand_predicate
[icode
][2]) (op1
, mode
))
9652 op1
= force_reg (mode
, op1
);
9654 /* The increment queue is LIFO, thus we have to `queue'
9655 the instructions in reverse order. */
9656 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9657 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9662 /* Preincrement, or we can't increment with one simple insn. */
9664 /* Save a copy of the value before inc or dec, to return it later. */
9665 temp
= value
= copy_to_reg (op0
);
9667 /* Arrange to return the incremented value. */
9668 /* Copy the rtx because expand_binop will protect from the queue,
9669 and the results of that would be invalid for us to return
9670 if our caller does emit_queue before using our result. */
9671 temp
= copy_rtx (value
= op0
);
9673 /* Increment however we can. */
9674 op1
= expand_binop (mode
, this_optab
, value
, op1
, op0
,
9675 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9676 /* Make sure the value is stored into OP0. */
9678 emit_move_insn (op0
, op1
);
9683 /* Expand all function calls contained within EXP, innermost ones first.
9684 But don't look within expressions that have sequence points.
9685 For each CALL_EXPR, record the rtx for its value
9686 in the CALL_EXPR_RTL field. */
/* NOTE(review): mangled fragment — the parameter declaration (`tree exp;`),
   braces, `return`s, the CALL_EXPR case label and several switch cases are
   elided; text kept byte-identical below.  */
9689 preexpand_calls (exp
)
9692 register int nops
, i
;
9693 int type
= TREE_CODE_CLASS (TREE_CODE (exp
));
9695 if (! do_preexpand_calls
)
9698 /* Only expressions and references can contain calls. */
9700 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r')
9703 switch (TREE_CODE (exp
))
9706 /* Do nothing if already expanded. */
9707 if (CALL_EXPR_RTL (exp
) != 0
9708 /* Do nothing if the call returns a variable-sized object. */
9709 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp
))) != INTEGER_CST
9710 /* Do nothing to built-in functions. */
9711 || (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
9712 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
9714 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))))
/* Expand the call now and cache its value rtx on the node.  */
9717 CALL_EXPR_RTL (exp
) = expand_call (exp
, NULL_RTX
, 0);
9722 case TRUTH_ANDIF_EXPR
:
9723 case TRUTH_ORIF_EXPR
:
9724 /* If we find one of these, then we can be sure
9725 the adjust will be done for it (since it makes jumps).
9726 Do it now, so that if this is inside an argument
9727 of a function, we don't get the stack adjustment
9728 after some other args have already been pushed. */
9729 do_pending_stack_adjust ();
9734 case WITH_CLEANUP_EXPR
:
9735 case CLEANUP_POINT_EXPR
:
/* SAVE_EXPR already expanded: nothing to do below it.  */
9739 if (SAVE_EXPR_RTL (exp
) != 0)
/* Default: recurse into each operand that can contain calls.  */
9743 nops
= tree_code_length
[(int) TREE_CODE (exp
)];
9744 for (i
= 0; i
< nops
; i
++)
9745 if (TREE_OPERAND (exp
, i
) != 0)
9747 type
= TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, i
)));
9748 if (type
== 'e' || type
== '<' || type
== '1' || type
== '2'
9750 preexpand_calls (TREE_OPERAND (exp
, i
));
9754 /* At the start of a function, record that we have no previously-pushed
9755 arguments waiting to be popped. */
/* NOTE(review): return type and braces elided in this fragment.  */
9758 init_pending_stack_adjust ()
9760 pending_stack_adjust
= 0;
9763 /* When exiting from function, if safe, clear out any pending stack adjust
9764 so the adjustment won't get done. */
/* NOTE(review): return type, braces and part of the condition (the
   `if (optimize > 0` line, presumably) are elided in this fragment.  */
9767 clear_pending_stack_adjust ()
9769 #ifdef EXIT_IGNORE_STACK
/* Only safe when the epilogue ignores the stack pointer and this
   function's body cannot be inlined elsewhere (inlining would need the
   adjust).  */
9771 && ! flag_omit_frame_pointer
&& EXIT_IGNORE_STACK
9772 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
9773 && ! flag_inline_functions
)
9774 pending_stack_adjust
= 0;
9778 /* Pop any previously-pushed arguments that have not been popped yet. */
/* NOTE(review): return type and braces elided in this fragment.  Skips the
   adjust entirely while inhibit_defer_pop is nonzero.  */
9781 do_pending_stack_adjust ()
9783 if (inhibit_defer_pop
== 0)
9785 if (pending_stack_adjust
!= 0)
9786 adjust_stack (GEN_INT (pending_stack_adjust
));
9787 pending_stack_adjust
= 0;
9791 /* Expand conditional expressions. */
9793 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9794 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* NOTE(review): parameter declarations and braces elided.  Thin wrapper:
   jump-if-false goes to LABEL, fall through when true.  */
9798 jumpifnot (exp
, label
)
9802 do_jump (exp
, label
, NULL_RTX
);
9805 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): the jumpif (exp, label) signature lines are elided from this
   fragment; only the body call survives.  Jump-if-true goes to LABEL.  */
9812 do_jump (exp
, NULL_RTX
, label
);
9815 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9816 the result is zero, or IF_TRUE_LABEL if the result is one.
9817 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9818 meaning fall through in that case.
9820 do_jump always does any pending stack adjust except when it does not
9821 actually perform a jump. An example where there is no jump
9822 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9824 This function is responsible for optimizing cases such as
9825 &&, || and comparison operators in EXP. */
/* NOTE(review): mangled fragment — the signature's `tree exp;` line, most of
   the switch's case labels, `break;`s, braces and several declarations
   (`temp`, `comparison`, `i`, `type`) are elided from view.  Text below is
   kept byte-identical; comments only are added, keyed to the surviving
   case bodies.  */
9828 do_jump (exp
, if_false_label
, if_true_label
)
9830 rtx if_false_label
, if_true_label
;
9832 register enum tree_code code
= TREE_CODE (exp
);
9833 /* Some cases need to create a label to jump to
9834 in order to properly fall through.
9835 These cases set DROP_THROUGH_LABEL nonzero. */
9836 rtx drop_through_label
= 0;
9841 enum machine_mode mode
;
/* Constant operand: jump unconditionally to whichever label matches.  */
9851 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9857 /* This is not true with #pragma weak */
9859 /* The address of something can never be zero. */
9861 emit_jump (if_true_label
);
/* NOP/CONVERT of a memory-ish reference: fall through to a full test.  */
9866 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9867 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9868 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9871 /* If we are narrowing the operand, we have to do the compare in the
9873 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9874 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9876 case NON_LVALUE_EXPR
:
9877 case REFERENCE_EXPR
:
9882 /* These cannot change zero->non-zero or vice versa. */
9883 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9887 /* This is never less insns than evaluating the PLUS_EXPR followed by
9888 a test and can be longer if the test is eliminated. */
9890 /* Reduce to minus. */
9891 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9892 TREE_OPERAND (exp
, 0),
9893 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9894 TREE_OPERAND (exp
, 1))));
9895 /* Process as MINUS. */
9899 /* Non-zero iff operands of minus differ. */
9900 comparison
= compare (build (NE_EXPR
, TREE_TYPE (exp
),
9901 TREE_OPERAND (exp
, 0),
9902 TREE_OPERAND (exp
, 1)),
9907 /* If we are AND'ing with a small constant, do this comparison in the
9908 smallest type that fits. If the machine doesn't have comparisons
9909 that small, it will be converted back to the wider comparison.
9910 This helps if we are testing the sign bit of a narrower object.
9911 combine can't do this for us because it can't know whether a
9912 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9914 if (! SLOW_BYTE_ACCESS
9915 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9916 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9917 && (i
= floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)))) >= 0
9918 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9919 && (type
= type_for_mode (mode
, 1)) != 0
9920 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9921 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9922 != CODE_FOR_nothing
))
9924 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9929 case TRUTH_NOT_EXPR
:
/* Logical NOT: just swap the two target labels.  */
9930 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9933 case TRUTH_ANDIF_EXPR
:
/* Short-circuit &&: a false first operand skips the second.  */
9934 if (if_false_label
== 0)
9935 if_false_label
= drop_through_label
= gen_label_rtx ();
9936 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9937 start_cleanup_deferal ();
9938 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9939 end_cleanup_deferal ();
9942 case TRUTH_ORIF_EXPR
:
/* Short-circuit ||: a true first operand skips the second.  */
9943 if (if_true_label
== 0)
9944 if_true_label
= drop_through_label
= gen_label_rtx ();
9945 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9946 start_cleanup_deferal ();
9947 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9948 end_cleanup_deferal ();
/* COMPOUND_EXPR (presumably): evaluate the first operand for its side
   effects only, then jump on the second.  */
9953 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9954 preserve_temp_slots (NULL_RTX
);
9958 do_pending_stack_adjust ();
9959 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
/* COMPONENT_REF / BIT_FIELD_REF / ARRAY_REF (case labels elided):
   if the field is narrow, test it in the smallest type that fits.  */
9966 int bitsize
, bitpos
, unsignedp
;
9967 enum machine_mode mode
;
9973 /* Get description of this reference. We don't actually care
9974 about the underlying object here. */
9975 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
9976 &mode
, &unsignedp
, &volatilep
,
9979 type
= type_for_size (bitsize
, unsignedp
);
9980 if (! SLOW_BYTE_ACCESS
9981 && type
!= 0 && bitsize
>= 0
9982 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9983 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9984 != CODE_FOR_nothing
))
9986 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9993 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9994 if (integer_onep (TREE_OPERAND (exp
, 1))
9995 && integer_zerop (TREE_OPERAND (exp
, 2)))
9996 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9998 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9999 && integer_onep (TREE_OPERAND (exp
, 2)))
10000 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
/* General ?: — jump on the condition, then on each arm.  */
10004 register rtx label1
= gen_label_rtx ();
10005 drop_through_label
= gen_label_rtx ();
10007 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
10009 start_cleanup_deferal ();
10010 /* Now the THEN-expression. */
10011 do_jump (TREE_OPERAND (exp
, 1),
10012 if_false_label
? if_false_label
: drop_through_label
,
10013 if_true_label
? if_true_label
: drop_through_label
);
10014 /* In case the do_jump just above never jumps. */
10015 do_pending_stack_adjust ();
10016 emit_label (label1
);
10018 /* Now the ELSE-expression. */
10019 do_jump (TREE_OPERAND (exp
, 2),
10020 if_false_label
? if_false_label
: drop_through_label
,
10021 if_true_label
? if_true_label
: drop_through_label
);
10022 end_cleanup_deferal ();
/* EQ_EXPR (case label elided): complex values are compared part by
   part; wide integers go word-by-word; otherwise one compare insn.  */
10028 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10030 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10031 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
10032 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10033 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10036 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
10037 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10038 fold (build1 (REALPART_EXPR
,
10039 TREE_TYPE (inner_type
),
10040 TREE_OPERAND (exp
, 0))),
10041 fold (build1 (REALPART_EXPR
,
10042 TREE_TYPE (inner_type
),
10043 TREE_OPERAND (exp
, 1))))),
10044 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
10045 fold (build1 (IMAGPART_EXPR
,
10046 TREE_TYPE (inner_type
),
10047 TREE_OPERAND (exp
, 0))),
10048 fold (build1 (IMAGPART_EXPR
,
10049 TREE_TYPE (inner_type
),
10050 TREE_OPERAND (exp
, 1))))))),
10051 if_false_label
, if_true_label
);
10052 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10053 && !can_compare_p (TYPE_MODE (inner_type
)))
10054 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
10056 comparison
= compare (exp
, EQ
, EQ
);
/* NE_EXPR (case label elided): mirror of the EQ case with the label
   roles / comparison codes inverted.  */
10062 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10064 if (integer_zerop (TREE_OPERAND (exp
, 1)))
10065 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
10066 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
10067 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
10070 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
10071 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10072 fold (build1 (REALPART_EXPR
,
10073 TREE_TYPE (inner_type
),
10074 TREE_OPERAND (exp
, 0))),
10075 fold (build1 (REALPART_EXPR
,
10076 TREE_TYPE (inner_type
),
10077 TREE_OPERAND (exp
, 1))))),
10078 fold (build (NE_EXPR
, TREE_TYPE (exp
),
10079 fold (build1 (IMAGPART_EXPR
,
10080 TREE_TYPE (inner_type
),
10081 TREE_OPERAND (exp
, 0))),
10082 fold (build1 (IMAGPART_EXPR
,
10083 TREE_TYPE (inner_type
),
10084 TREE_OPERAND (exp
, 1))))))),
10085 if_false_label
, if_true_label
);
10086 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
10087 && !can_compare_p (TYPE_MODE (inner_type
)))
10088 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
10090 comparison
= compare (exp
, NE
, NE
);
/* LT / LE / GT / GE (case labels elided): each picks word-by-word
   comparison when the mode can't be compared directly, else a single
   signed/unsigned compare.  */
10095 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10097 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10098 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
10100 comparison
= compare (exp
, LT
, LTU
);
10104 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10106 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10107 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
10109 comparison
= compare (exp
, LE
, LEU
);
10113 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10115 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10116 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
10118 comparison
= compare (exp
, GT
, GTU
);
10122 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10124 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
10125 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
10127 comparison
= compare (exp
, GE
, GEU
);
/* default (case label elided): evaluate EXP and compare against zero.  */
10132 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
10134 /* This is not needed any more and causes poor code since it causes
10135 comparisons and tests from non-SI objects to have different code
10137 /* Copy to register to avoid generating bad insns by cse
10138 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10139 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
10140 temp
= copy_to_reg (temp
);
10142 do_pending_stack_adjust ();
10143 if (GET_CODE (temp
) == CONST_INT
)
10144 comparison
= (temp
== const0_rtx
? const0_rtx
: const_true_rtx
);
10145 else if (GET_CODE (temp
) == LABEL_REF
)
10146 comparison
= const_true_rtx
;
10147 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
10148 && !can_compare_p (GET_MODE (temp
)))
10149 /* Note swapping the labels gives us not-equal. */
10150 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
10151 else if (GET_MODE (temp
) != VOIDmode
)
10152 comparison
= compare_from_rtx (temp
, CONST0_RTX (GET_MODE (temp
)),
10153 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10154 GET_MODE (temp
), NULL_RTX
, 0);
10159 /* Do any postincrements in the expression that was tested. */
10162 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10163 straight into a conditional jump instruction as the jump condition.
10164 Otherwise, all the work has been done already. */
10166 if (comparison
== const_true_rtx
)
10169 emit_jump (if_true_label
);
10171 else if (comparison
== const0_rtx
)
10173 if (if_false_label
)
10174 emit_jump (if_false_label
);
10176 else if (comparison
)
10177 do_jump_for_compare (comparison
, if_false_label
, if_true_label
);
10179 if (drop_through_label
)
10181 /* If do_jump produces code that might be jumped around,
10182 do any stack adjusts from that code, before the place
10183 where control merges in. */
10184 do_pending_stack_adjust ();
10185 emit_label (drop_through_label
);
10189 /* Given a comparison expression EXP for values too wide to be compared
10190 with one insn, test the comparison and jump to the appropriate label.
10191 The code of EXP is ignored; we always test GT if SWAP is 0,
10192 and LT if SWAP is 1. */
/* NOTE(review): mangled fragment — `tree exp; int swap;` declarations,
   braces, the `int i;`/`rtx comp;` declarations and the second NULL_RTX
   argument line of the second compare_from_rtx call are elided; text kept
   byte-identical below.  */
10195 do_jump_by_parts_greater (exp
, swap
, if_false_label
, if_true_label
)
10198 rtx if_false_label
, if_true_label
;
/* SWAP selects which operand is "op0": swapping operands converts the
   GT test into an LT test, per the header comment.  */
10200 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
10201 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
10202 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10203 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10204 rtx drop_through_label
= 0;
10205 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
/* Missing labels fall through; synthesize one shared label for that.  */
10208 if (! if_true_label
|| ! if_false_label
)
10209 drop_through_label
= gen_label_rtx ();
10210 if (! if_true_label
)
10211 if_true_label
= drop_through_label
;
10212 if (! if_false_label
)
10213 if_false_label
= drop_through_label
;
10215 /* Compare a word at a time, high order first. */
10216 for (i
= 0; i
< nwords
; i
++)
10219 rtx op0_word
, op1_word
;
10221 if (WORDS_BIG_ENDIAN
)
10223 op0_word
= operand_subword_force (op0
, i
, mode
);
10224 op1_word
= operand_subword_force (op1
, i
, mode
);
10228 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10229 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10232 /* All but high-order word must be compared as unsigned. */
10233 comp
= compare_from_rtx (op0_word
, op1_word
,
10234 (unsignedp
|| i
> 0) ? GTU
: GT
,
10235 unsignedp
, word_mode
, NULL_RTX
, 0);
10236 if (comp
== const_true_rtx
)
10237 emit_jump (if_true_label
);
10238 else if (comp
!= const0_rtx
)
10239 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10241 /* Consider lower words only if these are equal. */
10242 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
10244 if (comp
== const_true_rtx
)
10245 emit_jump (if_false_label
)
;
10246 else if (comp
!= const0_rtx
)
10247 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
/* All words equal: the strict GT/LT is false.  */
10250 if (if_false_label
)
10251 emit_jump (if_false_label
);
10252 if (drop_through_label
)
10253 emit_label (drop_through_label
);
10256 /* Compare OP0 with OP1, word at a time, in mode MODE.
10257 UNSIGNEDP says to do unsigned comparison.
10258 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* RTX-level sibling of do_jump_by_parts_greater: OP0/OP1 are already rtx
   values rather than trees.  NOTE(review): garbled extraction -- the
   declarations of `i'/`comp', braces, and some argument lines appear to be
   missing; the code bytes are left untouched.  */
10261 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
)
10262 enum machine_mode mode
;
10265 rtx if_false_label
, if_true_label
;
10267 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10268 rtx drop_through_label
= 0;
/* Supply a drop-through label for any missing target.  */
10271 if (! if_true_label
|| ! if_false_label
)
10272 drop_through_label
= gen_label_rtx ();
10273 if (! if_true_label
)
10274 if_true_label
= drop_through_label
;
10275 if (! if_false_label
)
10276 if_false_label
= drop_through_label
;
10278 /* Compare a word at a time, high order first. */
10279 for (i
= 0; i
< nwords
; i
++)
10282 rtx op0_word
, op1_word
;
/* Pick word I from the high-order end regardless of endianness.  */
10284 if (WORDS_BIG_ENDIAN
)
10286 op0_word
= operand_subword_force (op0
, i
, mode
);
10287 op1_word
= operand_subword_force (op1
, i
, mode
);
10291 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
10292 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
10295 /* All but high-order word must be compared as unsigned. */
10296 comp
= compare_from_rtx (op0_word
, op1_word
,
10297 (unsignedp
|| i
> 0) ? GTU
: GT
,
10298 unsignedp
, word_mode
, NULL_RTX
, 0);
10299 if (comp
== const_true_rtx
)
10300 emit_jump (if_true_label
);
10301 else if (comp
!= const0_rtx
)
10302 do_jump_for_compare (comp
, NULL_RTX
, if_true_label
);
10304 /* Consider lower words only if these are equal. */
10305 comp
= compare_from_rtx (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
/* NOTE(review): trailing arguments of this compare_from_rtx call appear to
   have been dropped by the extraction.  */
10307 if (comp
== const_true_rtx
)
10308 emit_jump (if_false_label
);
10309 else if (comp
!= const0_rtx
)
10310 do_jump_for_compare (comp
, NULL_RTX
, if_false_label
);
/* All words equal: the "greater" relation is false.  */
10313 if (if_false_label
)
10314 emit_jump (if_false_label
);
10315 if (drop_through_label
)
10316 emit_label (drop_through_label
);
10319 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10320 with one insn, test the comparison and jump to the appropriate label. */
/* NOTE(review): garbled extraction -- declarations of `i', braces, and some
   lines between the visible original line numbers (e.g. 10347-10349) appear
   to be missing, so the exact branch structure cannot be confirmed from this
   text.  Code bytes left untouched.  */
10323 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
)
10325 rtx if_false_label
, if_true_label
;
10327 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10328 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10329 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
10330 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
10332 rtx drop_through_label
= 0;
/* With no false label, fall through past the word tests.  */
10334 if (! if_false_label
)
10335 drop_through_label
= if_false_label
= gen_label_rtx ();
/* Test each word pair; any mismatch means the values are unequal.  */
10337 for (i
= 0; i
< nwords
; i
++)
10339 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
, mode
),
10340 operand_subword_force (op1
, i
, mode
),
10341 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
10342 word_mode
, NULL_RTX
, 0);
10343 if (comp
== const_true_rtx
)
10344 emit_jump (if_false_label
);
10345 else if (comp
!= const0_rtx
)
10346 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
/* All word pairs passed the equality test.  */
10350 emit_jump (if_true_label
);
10351 if (drop_through_label
)
10352 emit_label (drop_through_label
);
10355 /* Jump according to whether OP0 is 0.
10356 We assume that OP0 has an integer mode that is too wide
10357 for the available compare insns. */
/* NOTE(review): garbled extraction -- `i' declaration, braces, and at least
   one argument line of the compare_from_rtx call (the `mode' argument after
   `i,') appear to be missing.  Code bytes left untouched.  */
10360 do_jump_by_parts_equality_rtx (op0
, if_false_label
, if_true_label
)
10362 rtx if_false_label
, if_true_label
;
10364 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
10366 rtx drop_through_label
= 0;
10368 if (! if_false_label
)
10369 drop_through_label
= if_false_label
= gen_label_rtx ();
/* Compare each word of OP0 against zero (unsigned EQ).  */
10371 for (i
= 0; i
< nwords
; i
++)
10373 rtx comp
= compare_from_rtx (operand_subword_force (op0
, i
,
10375 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
, 0);
10376 if (comp
== const_true_rtx
)
10377 emit_jump (if_false_label
);
10378 else if (comp
!= const0_rtx
)
10379 do_jump_for_compare (comp
, if_false_label
, NULL_RTX
);
/* Every word passed the zero test.  */
10383 emit_jump (if_true_label
);
10384 if (drop_through_label
)
10385 emit_label (drop_through_label
);
10388 /* Given a comparison expression in rtl form, output conditional branches to
10389 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
/* NOTE(review): garbled extraction -- the declarations of `insn' and
   `branch', several braces, and the if/else scaffolding between the visible
   original line numbers are missing, so the two visible emit_jump_insn arms
   cannot be fully reconstructed from this text.  Code bytes left untouched.
   COMPARISON is an rtx condition; bcc_gen_fctn maps its code to a
   branch-generator function.  */
10392 do_jump_for_compare (comparison
, if_false_label
, if_true_label
)
10393 rtx comparison
, if_false_label
, if_true_label
;
/* Direct case: a branch-on-true generator exists for this rtx code.  */
10397 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10398 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)]) (if_true_label
));
10402 if (if_false_label
)
10403 emit_jump (if_false_label
);
10405 else if (if_false_label
)
10408 rtx prev
= get_last_insn ();
10411 /* Output the branch with the opposite condition. Then try to invert
10412 what is generated. If more than one insn is a branch, or if the
10413 branch is not the last insn written, abort. If we can't invert
10414 the branch, emit make a true label, redirect this jump to that,
10415 emit a jump to the false label and define the true label. */
10417 if (bcc_gen_fctn
[(int) GET_CODE (comparison
)] != 0)
10418 emit_jump_insn ((*bcc_gen_fctn
[(int) GET_CODE (comparison
)])(if_false_label
));
10422 /* Here we get the first insn that was just emitted. It used to be the
10423 case that, on some machines, emitting the branch would discard
10424 the previous compare insn and emit a replacement. This isn't
10425 done anymore, but abort if we see that PREV is deleted. */
10428 insn
= get_insns ();
10429 else if (INSN_DELETED_P (prev
))
10432 insn
= NEXT_INSN (prev
);
/* Scan the just-emitted insns for the branch.  */
10434 for (; insn
; insn
= NEXT_INSN (insn
))
10435 if (GET_CODE (insn
) == JUMP_INSN
)
10442 if (branch
!= get_last_insn ())
10445 JUMP_LABEL (branch
) = if_false_label
;
/* If inversion fails, fall back to redirecting the branch to a fresh
   true label and emitting an explicit jump to the false label.  */
10446 if (! invert_jump (branch
, if_false_label
))
10448 if_true_label
= gen_label_rtx ();
10449 redirect_jump (branch
, if_true_label
);
10450 emit_jump (if_false_label
);
10451 emit_label (if_true_label
);
10456 /* Generate code for a comparison expression EXP
10457 (including code to compute the values to be compared)
10458 and set (CC0) according to the result.
10459 SIGNED_CODE should be the rtx operation for this comparison for
10460 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10462 We force a stack adjustment unless there are currently
10463 things pushed on the stack that aren't yet used. */
/* NOTE(review): garbled extraction -- the left-hand sides of the two
   expand_expr assignments (presumably `register rtx op0/op1') and parts of
   the funcptr-canonicalization conditionals and the final compare_from_rtx
   argument list are missing.  Code bytes left untouched.  */
10466 compare (exp
, signed_code
, unsigned_code
)
10468 enum rtx_code signed_code
, unsigned_code
;
10471 = expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
10473 = expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
10474 register tree type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
10475 register enum machine_mode mode
= TYPE_MODE (type
);
10476 int unsignedp
= TREE_UNSIGNED (type
);
/* Select the signed or unsigned rtx code based on the operand type.  */
10477 enum rtx_code code
= unsignedp
? unsigned_code
: signed_code
;
10479 #ifdef HAVE_canonicalize_funcptr_for_compare
10480 /* If function pointers need to be "canonicalized" before they can
10481 be reliably compared, then canonicalize them. */
10482 if (HAVE_canonicalize_funcptr_for_compare
10483 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10484 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10487 rtx new_op0
= gen_reg_rtx (mode
);
10489 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
/* Likewise canonicalize the second operand if it is a function pointer.  */
10493 if (HAVE_canonicalize_funcptr_for_compare
10494 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10495 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10498 rtx new_op1
= gen_reg_rtx (mode
);
10500 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
/* Delegate to the rtx-level comparison helper.  */
10505 return compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
,
10507 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
10508 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
10511 /* Like compare but expects the values to compare as two rtx's.
10512 The decision as to signed or unsigned comparison must be made by the caller.
10514 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10517 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10518 size of MODE should be used. */
/* NOTE(review): garbled extraction -- declarations of `unsignedp', `size',
   `align', and `tem', plus braces and some statement lines, appear to be
   missing.  Code bytes left untouched.  Returns an rtx comparison of
   (cc0) against 0 suitable for a conditional jump.  */
10521 compare_from_rtx (op0
, op1
, code
, unsignedp
, mode
, size
, align
)
10522 register rtx op0
, op1
;
10523 enum rtx_code code
;
10525 enum machine_mode mode
;
10531 /* If one operand is constant, make it the second one. Only do this
10532 if the other operand is not constant as well. */
10534 if ((CONSTANT_P (op0
) && ! CONSTANT_P (op1
))
10535 || (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) != CONST_INT
))
/* Swapping the operands requires swapping the comparison direction.  */
10540 code
= swap_condition (code
);
10543 if (flag_force_mem
)
10545 op0
= force_not_mem (op0
);
10546 op1
= force_not_mem (op1
);
10549 do_pending_stack_adjust ();
/* Constant-fold the comparison when both operands are constants.  */
10551 if (GET_CODE (op0
) == CONST_INT
&& GET_CODE (op1
) == CONST_INT
10552 && (tem
= simplify_relational_operation (code
, mode
, op0
, op1
)) != 0)
10556 /* There's no need to do this now that combine.c can eliminate lots of
10557 sign extensions. This can be less efficient in certain cases on other
10560 /* If this is a signed equality comparison, we can do it as an
10561 unsigned comparison since zero-extension is cheaper than sign
10562 extension and comparisons with zero are done as unsigned. This is
10563 the case even on machines that can do fast sign extension, since
10564 zero-extension is easier to combine with other operations than
10565 sign-extension is. If we are comparing against a constant, we must
10566 convert it to what it would look like unsigned. */
10567 if ((code
== EQ
|| code
== NE
) && ! unsignedp
10568 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
/* Mask the constant to its unsigned (zero-extended) representation.  */
10570 if (GET_CODE (op1
) == CONST_INT
10571 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
10572 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
/* Emit the actual compare insn and hand back a cc0-based condition.  */
10577 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
, align
);
10579 return gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
10582 /* Generate code to calculate EXP using a store-flag instruction
10583 and return an rtx for the result. EXP is either a comparison
10584 or a TRUTH_NOT_EXPR whose operand is a comparison.
10586 If TARGET is nonzero, store the result there if convenient.
10588 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10591 Return zero if there is no suitable set-flag instruction
10592 available on this machine.
10594 Once expand_expr has been called on the arguments of the comparison,
10595 we are committed to doing the store flag, since it is not safe to
10596 re-evaluate the expression. We emit the store-flag insn by calling
10597 emit_store_flag, but only expand the arguments if we have a reason
10598 to believe that emit_store_flag will be successful. If we think that
10599 it will, but it isn't, we have to simulate the store-flag with a
10600 set/jump/set sequence. */
/* NOTE(review): garbled extraction -- many original lines are missing
   (declarations of `invert', `unsignedp', `op0', `op1', `tem', `bitnum',
   `ops_unsignedp'; the switch's case labels; braces; several returns).
   Code bytes are left untouched; comments describe only what is visible.  */
10603 do_store_flag (exp
, target
, mode
, only_cheap
)
10606 enum machine_mode mode
;
10609 enum rtx_code code
;
10610 tree arg0
, arg1
, type
;
10612 enum machine_mode operand_mode
;
10616 enum insn_code icode
;
10617 rtx subtarget
= target
;
10618 rtx result
, label
, pattern
, jump_pat
;
10620 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10621 result at the end. We can't simply invert the test since it would
10622 have already been inverted if it were valid. This case occurs for
10623 some floating-point comparisons. */
10625 if (TREE_CODE (exp
) == TRUTH_NOT_EXPR
)
10626 invert
= 1, exp
= TREE_OPERAND (exp
, 0);
10628 arg0
= TREE_OPERAND (exp
, 0);
10629 arg1
= TREE_OPERAND (exp
, 1);
10630 type
= TREE_TYPE (arg0
);
10631 operand_mode
= TYPE_MODE (type
);
10632 unsignedp
= TREE_UNSIGNED (type
);
10634 /* We won't bother with BLKmode store-flag operations because it would mean
10635 passing a lot of information to emit_store_flag. */
10636 if (operand_mode
== BLKmode
)
10639 /* We won't bother with store-flag operations involving function pointers
10640 when function pointers must be canonicalized before comparisons. */
10641 #ifdef HAVE_canonicalize_funcptr_for_compare
10642 if (HAVE_canonicalize_funcptr_for_compare
10643 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
10644 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
10646 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
10647 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
10648 == FUNCTION_TYPE
))))
10655 /* Get the rtx comparison code to use. We know that EXP is a comparison
10656 operation of some type. Some comparisons against 1 and -1 can be
10657 converted to comparisons with zero. Do so here so that the tests
10658 below will be aware that we have a comparison with zero. These
10659 tests will not catch constants in the first operand, but constants
10660 are rarely passed as the first operand. */
/* NOTE(review): the case labels of this switch (presumably LT_EXPR,
   LE_EXPR, GT_EXPR, GE_EXPR, ...) are missing from the extraction; each
   visible pair below rewrites a comparison against 1 or -1 into one against
   zero and then picks the signed/unsigned rtx code.  */
10662 switch (TREE_CODE (exp
))
10671 if (integer_onep (arg1
))
10672 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
10674 code
= unsignedp
? LTU
: LT
;
10677 if (! unsignedp
&& integer_all_onesp (arg1
))
10678 arg1
= integer_zero_node
, code
= LT
;
10680 code
= unsignedp
? LEU
: LE
;
10683 if (! unsignedp
&& integer_all_onesp (arg1
))
10684 arg1
= integer_zero_node
, code
= GE
;
10686 code
= unsignedp
? GTU
: GT
;
10689 if (integer_onep (arg1
))
10690 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
10692 code
= unsignedp
? GEU
: GE
;
10698 /* Put a constant second. */
10699 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
)
10701 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
10702 code
= swap_condition (code
);
10705 /* If this is an equality or inequality test of a single bit, we can
10706 do this by shifting the bit being tested to the low-order bit and
10707 masking the result with the constant 1. If the condition was EQ,
10708 we xor it with 1. This does not require an scc insn and is faster
10709 than an scc insn even if we have it. */
10711 if ((code
== NE
|| code
== EQ
)
10712 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
10713 && integer_pow2p (TREE_OPERAND (arg0
, 1))
10714 && TYPE_PRECISION (type
) <= HOST_BITS_PER_WIDE_INT
)
10716 tree inner
= TREE_OPERAND (arg0
, 0);
/* Extract the single-bit mask constant being tested.  */
10721 tem
= INTVAL (expand_expr (TREE_OPERAND (arg0
, 1),
10722 NULL_RTX
, VOIDmode
, 0));
10723 /* In this case, immed_double_const will sign extend the value to make
10724 it look the same on the host and target. We must remove the
10725 sign-extension before calling exact_log2, since exact_log2 will
10726 fail for negative values. */
10727 if (BITS_PER_WORD
< HOST_BITS_PER_WIDE_INT
10728 && BITS_PER_WORD
== GET_MODE_BITSIZE (TYPE_MODE (type
)))
10729 /* We don't use the obvious constant shift to generate the mask,
10730 because that generates compiler warnings when BITS_PER_WORD is
10731 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10732 code is unreachable in that case. */
10733 tem
= tem
& GET_MODE_MASK (word_mode
);
10734 bitnum
= exact_log2 (tem
);
10736 /* If INNER is a right shift of a constant and it plus BITNUM does
10737 not overflow, adjust BITNUM and INNER. */
10739 if (TREE_CODE (inner
) == RSHIFT_EXPR
10740 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
10741 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
10742 && (bitnum
+ TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1))
10743 < TYPE_PRECISION (type
)))
10745 bitnum
+=TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
10746 inner
= TREE_OPERAND (inner
, 0);
10749 /* If we are going to be able to omit the AND below, we must do our
10750 operations as unsigned. If we must use the AND, we have a choice.
10751 Normally unsigned is faster, but for some machines signed is. */
10752 ops_unsignedp
= (bitnum
== TYPE_PRECISION (type
) - 1 ? 1
10753 #ifdef LOAD_EXTEND_OP
10754 : (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
? 0 : 1)
/* Expand INNER into a fresh subtarget if the current one is unusable.  */
10760 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
10761 || GET_MODE (subtarget
) != operand_mode
10762 || ! safe_from_p (subtarget
, inner
))
10765 op0
= expand_expr (inner
, subtarget
, VOIDmode
, 0);
/* Shift the tested bit down to bit 0.  */
10768 op0
= expand_shift (RSHIFT_EXPR
, GET_MODE (op0
), op0
,
10769 size_int (bitnum
), subtarget
, ops_unsignedp
);
10771 if (GET_MODE (op0
) != mode
)
10772 op0
= convert_to_mode (mode
, op0
, ops_unsignedp
);
/* For EQ (or inverted NE) flip the low bit with XOR.  */
10774 if ((code
== EQ
&& ! invert
) || (code
== NE
&& invert
))
10775 op0
= expand_binop (mode
, xor_optab
, op0
, const1_rtx
, subtarget
,
10776 ops_unsignedp
, OPTAB_LIB_WIDEN
);
10778 /* Put the AND last so it can combine with more things. */
10779 if (bitnum
!= TYPE_PRECISION (type
) - 1)
10780 op0
= expand_and (op0
, const1_rtx
, subtarget
);
10785 /* Now see if we are likely to be able to do this. Return if not. */
10786 if (! can_compare_p (operand_mode
))
/* Look up the machine's set-condition-code insn for this rtx code.  */
10788 icode
= setcc_gen_code
[(int) code
];
10789 if (icode
== CODE_FOR_nothing
10790 || (only_cheap
&& insn_operand_mode
[(int) icode
][0] != mode
))
10792 /* We can only do this if it is one of the special cases that
10793 can be handled without an scc insn. */
10794 if ((code
== LT
&& integer_zerop (arg1
))
10795 || (! only_cheap
&& code
== GE
&& integer_zerop (arg1
)))
10797 else if (BRANCH_COST
>= 0
10798 && ! only_cheap
&& (code
== NE
|| code
== EQ
)
10799 && TREE_CODE (type
) != REAL_TYPE
10800 && ((abs_optab
->handlers
[(int) operand_mode
].insn_code
10801 != CODE_FOR_nothing
)
10802 || (ffs_optab
->handlers
[(int) operand_mode
].insn_code
10803 != CODE_FOR_nothing
)))
/* Commit: expand the operands (see the function header comment).  */
10809 preexpand_calls (exp
);
10810 if (subtarget
== 0 || GET_CODE (subtarget
) != REG
10811 || GET_MODE (subtarget
) != operand_mode
10812 || ! safe_from_p (subtarget
, arg1
))
10815 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
10816 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
10819 target
= gen_reg_rtx (mode
);
10821 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10822 because, if the emit_store_flag does anything it will succeed and
10823 OP0 and OP1 will not be used subsequently. */
10825 result
= emit_store_flag (target
, code
,
10826 queued_subexp_p (op0
) ? copy_rtx (op0
) : op0
,
10827 queued_subexp_p (op1
) ? copy_rtx (op1
) : op1
,
10828 operand_mode
, unsignedp
, 1);
/* Invert the 0/1 result with XOR when a TRUTH_NOT_EXPR wrapped EXP.  */
10833 result
= expand_binop (mode
, xor_optab
, result
, const1_rtx
,
10834 result
, 0, OPTAB_LIB_WIDEN
);
10838 /* If this failed, we have to do this with set/compare/jump/set code. */
10839 if (GET_CODE (target
) != REG
10840 || reg_mentioned_p (target
, op0
) || reg_mentioned_p (target
, op1
))
10841 target
= gen_reg_rtx (GET_MODE (target
));
/* Preload the "condition holds" value, branch around the store of the
   opposite value when the condition is met.  */
10843 emit_move_insn (target
, invert
? const0_rtx
: const1_rtx
);
10844 result
= compare_from_rtx (op0
, op1
, code
, unsignedp
,
10845 operand_mode
, NULL_RTX
, 0);
10846 if (GET_CODE (result
) == CONST_INT
)
10847 return (((result
== const0_rtx
&& ! invert
)
10848 || (result
!= const0_rtx
&& invert
))
10849 ? const0_rtx
: const1_rtx
);
10851 label
= gen_label_rtx ();
10852 if (bcc_gen_fctn
[(int) code
] == 0)
10855 emit_jump_insn ((*bcc_gen_fctn
[(int) code
]) (label
));
10856 emit_move_insn (target
, invert
? const1_rtx
: const0_rtx
);
10857 emit_label (label
);
10862 /* Generate a tablejump instruction (used for switch statements). */
10864 #ifdef HAVE_tablejump
10866 /* INDEX is the value being switched on, with the lowest value
10867 in the table already subtracted.
10868 MODE is its expected mode (needed if INDEX is constant).
10869 RANGE is the length of the jump table.
10870 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10872 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10873 index value is out of range. */
/* NOTE(review): garbled extraction -- braces, #else/#endif lines for the
   inner conditionals, and some statements appear to be missing.  Code bytes
   left untouched.  */
10876 do_tablejump (index
, mode
, range
, table_label
, default_label
)
10877 rtx index
, range
, table_label
, default_label
;
10878 enum machine_mode mode
;
10880 register rtx temp
, vector
;
10882 /* Do an unsigned comparison (in the proper mode) between the index
10883 expression and the value which represents the length of the range.
10884 Since we just finished subtracting the lower bound of the range
10885 from the index expression, this comparison allows us to simultaneously
10886 check that the original index expression value is both greater than
10887 or equal to the minimum value of the range and less than or equal to
10888 the maximum value of the range. */
10890 emit_cmp_insn (index
, range
, GTU
, NULL_RTX
, mode
, 1, 0);
10891 emit_jump_insn (gen_bgtu (default_label
));
10893 /* If index is in range, it must fit in Pmode.
10894 Convert to Pmode so we can index with it. */
10896 index
= convert_to_mode (Pmode
, index
, 1);
10898 /* Don't let a MEM slip thru, because then INDEX that comes
10899 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10900 and break_out_memory_refs will go to work on it and mess it up. */
10901 #ifdef PIC_CASE_VECTOR_ADDRESS
10902 if (flag_pic
&& GET_CODE (index
) != REG
)
10903 index
= copy_to_mode_reg (Pmode
, index
);
10906 /* If flag_force_addr were to affect this address
10907 it could interfere with the tricky assumptions made
10908 about addresses that contain label-refs,
10909 which may be valid only very near the tablejump itself. */
10910 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10911 GET_MODE_SIZE, because this indicates how large insns are. The other
10912 uses should all be Pmode, because they are addresses. This code
10913 could fail if addresses and insns are not the same size. */
/* Form the table-entry address: table_label + index * entry_size.  */
10914 index
= gen_rtx (PLUS
, Pmode
,
10915 gen_rtx (MULT
, Pmode
, index
,
10916 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
10917 gen_rtx (LABEL_REF
, Pmode
, table_label
));
10918 #ifdef PIC_CASE_VECTOR_ADDRESS
10920 index
= PIC_CASE_VECTOR_ADDRESS (index
);
10923 index
= memory_address_noforce (CASE_VECTOR_MODE
, index
);
/* Fetch the table entry into a register and emit the indirect jump.  */
10924 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
10925 vector
= gen_rtx (MEM
, CASE_VECTOR_MODE
, index
);
10926 RTX_UNCHANGING_P (vector
) = 1;
10927 convert_move (temp
, vector
, 0);
10929 emit_jump_insn (gen_tablejump (temp
, table_label
));
10931 #ifndef CASE_VECTOR_PC_RELATIVE
10932 /* If we are generating PIC code or if the table is PC-relative, the
10933 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10939 #endif /* HAVE_tablejump */
10942 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
10943 to that value is on the top of the stack. The resulting type is TYPE, and
10944 the source declaration is DECL. */
/* NOTE(review): garbled extraction -- braces, the `abort'/error paths after
   the BLKmode/VOIDmode and neverneverland tests, and the #endif for
   DEBUG_PRINT_CODE appear to be missing.  Code bytes left untouched.  */
10947 bc_load_memory (type
, decl
)
10950 enum bytecode_opcode opcode
;
10953 /* Bit fields are special. We only know about signed and
10954 unsigned ints, and enums. The latter are treated as
10955 signed integers. */
10957 if (DECL_BIT_FIELD (decl
))
10958 if (TREE_CODE (type
) == ENUMERAL_TYPE
10959 || TREE_CODE (type
) == INTEGER_TYPE
)
/* Zero-extend (zxloadBI) unsigned bitfields, sign-extend (sxloadBI)
   signed ones.  */
10960 opcode
= TREE_UNSIGNED (type
) ? zxloadBI
: sxloadBI
;
10964 /* See corresponding comment in bc_store_memory. */
10965 if (TYPE_MODE (type
) == BLKmode
10966 || TYPE_MODE (type
) == VOIDmode
)
/* Normal case: map the machine mode to a load opcode.  */
10969 opcode
= mode_to_load_map
[(int) TYPE_MODE (type
)];
10971 if (opcode
== neverneverland
)
10974 bc_emit_bytecode (opcode
);
10976 #ifdef DEBUG_PRINT_CODE
10977 fputc ('\n', stderr
);
10982 /* Store the contents of the second stack slot to the address in the
10983 top stack slot. DECL is the declaration of the destination and is used
10984 to determine whether we're dealing with a bitfield. */
/* NOTE(review): garbled extraction -- braces, the bitfield opcode
   assignments, and error paths appear to be missing.  Code bytes left
   untouched.  */
10987 bc_store_memory (type
, decl
)
10990 enum bytecode_opcode opcode
;
10993 if (DECL_BIT_FIELD (decl
))
10995 if (TREE_CODE (type
) == ENUMERAL_TYPE
10996 || TREE_CODE (type
) == INTEGER_TYPE
)
11002 if (TYPE_MODE (type
) == BLKmode
)
11004 /* Copy structure. This expands to a block copy instruction, storeBLK.
11005 In addition to the arguments expected by the other store instructions,
11006 it also expects a type size (SImode) on top of the stack, which is the
11007 structure size in size units (usually bytes). The two first arguments
11008 are already on the stack; so we just put the size on level 1. For some
11009 other languages, the size may be variable, this is why we don't encode
11010 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11012 bc_expand_expr (TYPE_SIZE (type
));
/* Scalar case: map the machine mode to a store opcode.  */
11016 opcode
= mode_to_store_map
[(int) TYPE_MODE (type
)];
11018 if (opcode
== neverneverland
)
11021 bc_emit_bytecode (opcode
);
11023 #ifdef DEBUG_PRINT_CODE
11024 fputc ('\n', stderr
);
11029 /* Allocate local stack space sufficient to hold a value of the given
11030 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11031 integral power of 2. A special case is locals of type VOID, which
11032 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11033 remapped into the corresponding attribute of SI. */
/* NOTEreview): garbled extraction -- the declaration of `retval', the
   return statement, braces, and the condition guarding the
   `size = UNITS_PER_WORD' remapping appear to be missing.  Code bytes
   left untouched.  */
11036 bc_allocate_local (size
, alignment
)
11037 int size
, alignment
;
11040 int byte_alignment
;
11045 /* Normalize size and alignment */
11047 size
= UNITS_PER_WORD
;
/* Convert the bit alignment to bytes; sub-byte alignment is remapped.  */
11049 if (alignment
< BITS_PER_UNIT
)
11050 byte_alignment
= 1 << (INT_ALIGN
- 1);
11053 byte_alignment
= alignment
/ BITS_PER_UNIT
;
/* Round local_vars_size up to the requested byte alignment.  */
11055 if (local_vars_size
& (byte_alignment
- 1))
11056 local_vars_size
+= byte_alignment
- (local_vars_size
& (byte_alignment
- 1));
/* Record the allocation at the current offset and reserve the space.  */
11058 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11059 local_vars_size
+= size
;
11065 /* Allocate variable-sized local array. Variable-sized arrays are
11066 actually pointers to the address in memory where they are stored. */
/* NOTE(review): garbled extraction -- the declaration of `retval', the
   return statement, and part of the comment at original line 11079 appear
   to be missing.  Code bytes left untouched.  */
11069 bc_allocate_variable_array (size
)
11073 const int ptralign
= (1 << (PTR_ALIGN
- 1));
11075 /* Align pointer */
11076 if (local_vars_size
& ptralign
)
11077 local_vars_size
+= ptralign
- (local_vars_size
& ptralign
);
11079 /* Note down local space needed: pointer to block; also return
/* Reserve pointer-sized local space for the array's address.  */
11082 retval
= bc_gen_rtx ((char *) 0, local_vars_size
, (struct bc_label
*) 0);
11083 local_vars_size
+= POINTER_SIZE
/ BITS_PER_UNIT
;
11088 /* Push the machine address for the given external variable offset. */
/* NOTE(review): garbled extraction -- braces and the #endif for
   DEBUG_PRINT_CODE appear to be missing.  Code bytes left untouched.  */
11091 bc_load_externaddr (externaddr
)
/* Emit a constP push followed by a labelref for the external's label.  */
11094 bc_emit_bytecode (constP
);
11095 bc_emit_code_labelref (BYTECODE_LABEL (externaddr
),
11096 BYTECODE_BC_LABEL (externaddr
)->offset
);
11098 #ifdef DEBUG_PRINT_CODE
11099 fputc ('\n', stderr
);
11104 /* Like above, but expects an IDENTIFIER. */
/* NOTE(review): garbled extraction -- braces, the abort/error body of the
   IDENTIFIER_POINTER check, and the #endif appear to be missing.  Code
   bytes left untouched.  */
11107 bc_load_externaddr_id (id
, offset
)
11111 if (!IDENTIFIER_POINTER (id
))
11114 bc_emit_bytecode (constP
);
/* The identifier's name is duplicated because the labelref keeps it.  */
11115 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id
)), offset
);
11117 #ifdef DEBUG_PRINT_CODE
11118 fputc ('\n', stderr
);
11123 /* Push the machine address for the given local variable offset. */
/* NOTE(review): garbled extraction -- braces and the parameter declaration
   appear to be missing.  Code bytes left untouched.  */
11126 bc_load_localaddr (localaddr
)
/* localP pushes the address of a local at the given frame offset.  */
11129 bc_emit_instruction (localP
, (HOST_WIDE_INT
) BYTECODE_BC_LABEL (localaddr
)->offset
);
11133 /* Push the machine address for the given parameter offset.
11134 NOTE: offset is in bits. */
/* NOTE(review): garbled extraction -- the end of the argP instruction
   (presumably a `/ BITS_PER_UNIT' conversion and closing parens) appears
   to have been dropped.  Code bytes left untouched.  */
11137 bc_load_parmaddr (parmaddr
)
11140 bc_emit_instruction (argP
, ((HOST_WIDE_INT
) BYTECODE_BC_LABEL (parmaddr
)->offset
11145 /* Convert a[i] into *(a + i). */
/* Rewrites an ARRAY_REF tree as an INDIRECT_REF of pointer arithmetic so
   the bytecode expander only has to handle pointer dereference.
   NOTE(review): garbled extraction -- braces and some lines of the final
   build1 call (e.g. the use of `array_adr' and `index' inside the
   PLUS/MULT) appear to be missing.  Code bytes left untouched.  */
11148 bc_canonicalize_array_ref (exp
)
11151 tree type
= TREE_TYPE (exp
);
11152 tree array_adr
= build1 (ADDR_EXPR
, TYPE_POINTER_TO (type
),
11153 TREE_OPERAND (exp
, 0));
11154 tree index
= TREE_OPERAND (exp
, 1);
11157 /* Convert the integer argument to a type the same size as a pointer
11158 so the multiply won't overflow spuriously. */
11160 if (TYPE_PRECISION (TREE_TYPE (index
)) != POINTER_SIZE
)
11161 index
= convert (type_for_size (POINTER_SIZE
, 0), index
);
11163 /* The array address isn't volatile even if the array is.
11164 (Of course this isn't terribly relevant since the bytecode
11165 translator treats nearly everything as volatile anyway.) */
11166 TREE_THIS_VOLATILE (array_adr
) = 0;
/* Build *(array_adr + index * sizeof (element)).  */
11168 return build1 (INDIRECT_REF
, type
,
11169 fold (build (PLUS_EXPR
,
11170 TYPE_POINTER_TO (type
),
11172 fold (build (MULT_EXPR
,
11173 TYPE_POINTER_TO (type
),
11175 size_in_bytes (type
))))));
11179 /* Load the address of the component referenced by the given
11180 COMPONENT_REF expression.
11182 Returns innermost lvalue. */
/* NOTE(review): garbled extraction -- declarations of `tem' and `bitpos',
   braces, the loop-exit condition, and some `else' lines appear to be
   missing.  Code bytes left untouched.  */
11185 bc_expand_component_address (exp
)
11189 enum machine_mode mode
;
11191 HOST_WIDE_INT SIval
;
11194 tem
= TREE_OPERAND (exp
, 1);
11195 mode
= DECL_MODE (tem
);
11198 /* Compute cumulative bit offset for nested component refs
11199 and array refs, and find the ultimate containing object. */
11201 for (tem
= exp
;; tem
= TREE_OPERAND (tem
, 0))
11203 if (TREE_CODE (tem
) == COMPONENT_REF
)
11204 bitpos
+= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem
, 1)));
/* Constant-index array refs also contribute index * element size bits.  */
11206 if (TREE_CODE (tem
) == ARRAY_REF
11207 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
11208 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
))) == INTEGER_CST
)
11210 bitpos
+= (TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1))
11211 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem
)))
11212 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
/* Push the address of the containing object.  */
11217 bc_expand_expr (tem
);
11220 /* For bitfields also push their offset and size */
11221 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
11222 bc_push_offset_and_size (bitpos
, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp
, 1)));
/* Add the byte part of the accumulated bit offset, if any.  */
11224 if (SIval
= bitpos
/ BITS_PER_UNIT
)
11225 bc_emit_instruction (addconstPSI
, SIval
);
11227 return (TREE_OPERAND (exp
, 1));
11231 /* Emit code to push two SI constants */
/* Pushes OFFSET then SIZE as SImode constants; used to describe a
   bitfield to the bytecode load/store instructions.  NOTE(review):
   braces appear to be missing from the extraction.  */
11234 bc_push_offset_and_size (offset
, size
)
11235 HOST_WIDE_INT offset
, size
;
11237 bc_emit_instruction (constSI
, offset
);
11238 bc_emit_instruction (constSI
, size
);
11242 /* Emit byte code to push the address of the given lvalue expression to
11243 the stack. If it's a bit field, we also push offset and size info.
11245 Returns innermost component, which allows us to determine not only
11246 its type, but also whether it's a bitfield. */
/* NOTE(review): garbled extraction -- most of the switch's case labels
   (presumably ARRAY_REF, INDIRECT_REF, PARM_DECL, VAR_DECL, constant
   nodes, ...), braces, `break' statements, returns, and the declaration
   of `r' are missing, so the mapping of each fragment to its case can
   only be presumed.  Code bytes left untouched.  */
11249 bc_expand_address (exp
)
11253 if (!exp
|| TREE_CODE (exp
) == ERROR_MARK
)
11257 switch (TREE_CODE (exp
))
/* Presumably ARRAY_REF: canonicalize to *(a + i) and recurse.  */
11261 return (bc_expand_address (bc_canonicalize_array_ref (exp
)));
11263 case COMPONENT_REF
:
11265 return (bc_expand_component_address (exp
));
/* Presumably INDIRECT_REF: the operand's value is the address.  */
11269 bc_expand_expr (TREE_OPERAND (exp
, 0));
11271 /* For variable-sized types: retrieve pointer. Sometimes the
11272 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11273 also make sure we have an operand, just in case... */
11275 if (TREE_OPERAND (exp
, 0)
11276 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
11277 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp
, 0)))) != INTEGER_CST
)
11278 bc_emit_instruction (loadP
);
11280 /* If packed, also return offset and size */
11281 if (DECL_BIT_FIELD (TREE_OPERAND (exp
, 0)))
11283 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp
, 0))),
11284 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp
, 0))));
11286 return (TREE_OPERAND (exp
, 0));
11288 case FUNCTION_DECL
:
11290 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11291 BYTECODE_BC_LABEL (DECL_RTL (exp
))->offset
);
/* Presumably PARM_DECL: parameters live at frame-argument offsets.  */
11296 bc_load_parmaddr (DECL_RTL (exp
));
11298 /* For variable-sized types: retrieve pointer */
11299 if (TYPE_SIZE (TREE_TYPE (exp
))
11300 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11301 bc_emit_instruction (loadP
);
11303 /* If packed, also return offset and size */
11304 if (DECL_BIT_FIELD (exp
))
11305 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11306 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
11312 bc_emit_instruction (returnP
);
/* Presumably VAR_DECL: statics/externals use label addresses, others
   use local frame offsets.  */
11318 if (BYTECODE_LABEL (DECL_RTL (exp
)))
11319 bc_load_externaddr (DECL_RTL (exp
));
11322 if (DECL_EXTERNAL (exp
))
11323 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp
),
11324 (BYTECODE_BC_LABEL (DECL_RTL (exp
)))->offset
);
11326 bc_load_localaddr (DECL_RTL (exp
));
11328 /* For variable-sized types: retrieve pointer */
11329 if (TYPE_SIZE (TREE_TYPE (exp
))
11330 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
)
11331 bc_emit_instruction (loadP
);
11333 /* If packed, also return offset and size */
11334 if (DECL_BIT_FIELD (exp
))
11335 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp
)),
11336 TREE_INT_CST_LOW (DECL_SIZE (exp
)));
/* Presumably a constant node: emit it to the constant pool and push
   its label's address.  */
11344 bc_emit_bytecode (constP
);
11345 r
= output_constant_def (exp
);
11346 bc_emit_code_labelref (BYTECODE_LABEL (r
), BYTECODE_BC_LABEL (r
)->offset
);
11348 #ifdef DEBUG_PRINT_CODE
11349 fputc ('\n', stderr
);
11360 /* Most lvalues don't have components. */
11365 /* Emit a type code to be used by the runtime support in handling
11366 parameter passing. The type code consists of the machine mode
11367 plus the minimal alignment shifted left 8 bits. */
11370 bc_runtime_type_code (type
)
11375 switch (TREE_CODE (type
))
11381 case ENUMERAL_TYPE
:
11385 val
= (int) TYPE_MODE (type
) | TYPE_ALIGN (type
) << 8;
11397 return build_int_2 (val
, 0);
11401 /* Generate constructor label */
11404 bc_gen_constr_label ()
11406 static int label_counter
;
11407 static char label
[20];
11409 sprintf (label
, "*LR%d", label_counter
++);
11411 return (obstack_copy0 (&permanent_obstack
, label
, strlen (label
)));
11415 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11416 expand the constructor data as static data, and push a pointer to it.
11417 The pointer is put in the pointer table and is retrieved by a constP
11418 bytecode instruction. We then loop and store each constructor member in
11419 the corresponding component. Finally, we return the original pointer on
11423 bc_expand_constructor (constr
)
11427 HOST_WIDE_INT ptroffs
;
11431 /* Literal constructors are handled as constants, whereas
11432 non-literals are evaluated and stored element by element
11433 into the data segment. */
11435 /* Allocate space in proper segment and push pointer to space on stack.
11438 l
= bc_gen_constr_label ();
11440 if (TREE_CONSTANT (constr
))
11444 bc_emit_const_labeldef (l
);
11445 bc_output_constructor (constr
, int_size_in_bytes (TREE_TYPE (constr
)));
11451 bc_emit_data_labeldef (l
);
11452 bc_output_data_constructor (constr
);
11456 /* Add reference to pointer table and recall pointer to stack;
11457 this code is common for both types of constructors: literals
11458 and non-literals. */
11460 ptroffs
= bc_define_pointer (l
);
11461 bc_emit_instruction (constP
, ptroffs
);
11463 /* This is all that has to be done if it's a literal. */
11464 if (TREE_CONSTANT (constr
))
11468 /* At this point, we have the pointer to the structure on top of the stack.
11469 Generate sequences of store_memory calls for the constructor. */
11471 /* constructor type is structure */
11472 if (TREE_CODE (TREE_TYPE (constr
)) == RECORD_TYPE
)
11476 /* If the constructor has fewer fields than the structure,
11477 clear the whole structure first. */
11479 if (list_length (CONSTRUCTOR_ELTS (constr
))
11480 != list_length (TYPE_FIELDS (TREE_TYPE (constr
))))
11482 bc_emit_instruction (duplicate
);
11483 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11484 bc_emit_instruction (clearBLK
);
11487 /* Store each element of the constructor into the corresponding
11488 field of TARGET. */
11490 for (elt
= CONSTRUCTOR_ELTS (constr
); elt
; elt
= TREE_CHAIN (elt
))
11492 register tree field
= TREE_PURPOSE (elt
);
11493 register enum machine_mode mode
;
11498 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
)) /* * DECL_SIZE_UNIT (field) */;
11499 mode
= DECL_MODE (field
);
11500 unsignedp
= TREE_UNSIGNED (field
);
11502 bitpos
= TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field
));
11504 bc_store_field (elt
, bitsize
, bitpos
, mode
, TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11505 /* The alignment of TARGET is
11506 at least what its type requires. */
11508 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11509 int_size_in_bytes (TREE_TYPE (constr
)));
11514 /* Constructor type is array */
11515 if (TREE_CODE (TREE_TYPE (constr
)) == ARRAY_TYPE
)
11519 tree domain
= TYPE_DOMAIN (TREE_TYPE (constr
));
11520 int minelt
= TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain
));
11521 int maxelt
= TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain
));
11522 tree elttype
= TREE_TYPE (TREE_TYPE (constr
));
11524 /* If the constructor has fewer fields than the structure,
11525 clear the whole structure first. */
11527 if (list_length (CONSTRUCTOR_ELTS (constr
)) < maxelt
- minelt
+ 1)
11529 bc_emit_instruction (duplicate
);
11530 bc_emit_instruction (constSI
, (HOST_WIDE_INT
) int_size_in_bytes (TREE_TYPE (constr
)));
11531 bc_emit_instruction (clearBLK
);
11535 /* Store each element of the constructor into the corresponding
11536 element of TARGET, determined by counting the elements. */
11538 for (elt
= CONSTRUCTOR_ELTS (constr
), i
= 0;
11540 elt
= TREE_CHAIN (elt
), i
++)
11542 register enum machine_mode mode
;
11547 mode
= TYPE_MODE (elttype
);
11548 bitsize
= GET_MODE_BITSIZE (mode
);
11549 unsignedp
= TREE_UNSIGNED (elttype
);
11551 bitpos
= (i
* TREE_INT_CST_LOW (TYPE_SIZE (elttype
))
11552 /* * TYPE_SIZE_UNIT (elttype) */ );
11554 bc_store_field (elt
, bitsize
, bitpos
, mode
,
11555 TREE_VALUE (elt
), TREE_TYPE (TREE_VALUE (elt
)),
11556 /* The alignment of TARGET is
11557 at least what its type requires. */
11559 TYPE_ALIGN (TREE_TYPE (constr
)) / BITS_PER_UNIT
,
11560 int_size_in_bytes (TREE_TYPE (constr
)));
11567 /* Store the value of EXP (an expression tree) into member FIELD of
11568 structure at address on stack, which has type TYPE, mode MODE and
11569 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11572 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11573 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11576 bc_store_field (field
, bitsize
, bitpos
, mode
, exp
, type
,
11577 value_mode
, unsignedp
, align
, total_size
)
11578 int bitsize
, bitpos
;
11579 enum machine_mode mode
;
11580 tree field
, exp
, type
;
11581 enum machine_mode value_mode
;
11587 /* Expand expression and copy pointer */
11588 bc_expand_expr (exp
);
11589 bc_emit_instruction (over
);
11592 /* If the component is a bit field, we cannot use addressing to access
11593 it. Use bit-field techniques to store in it. */
11595 if (DECL_BIT_FIELD (field
))
11597 bc_store_bit_field (bitpos
, bitsize
, unsignedp
);
11601 /* Not bit field */
11603 HOST_WIDE_INT offset
= bitpos
/ BITS_PER_UNIT
;
11605 /* Advance pointer to the desired member */
11607 bc_emit_instruction (addconstPSI
, offset
);
11610 bc_store_memory (type
, field
);
11615 /* Store SI/SU in bitfield */
11618 bc_store_bit_field (offset
, size
, unsignedp
)
11619 int offset
, size
, unsignedp
;
11621 /* Push bitfield offset and size */
11622 bc_push_offset_and_size (offset
, size
);
11625 bc_emit_instruction (sstoreBI
);
11629 /* Load SI/SU from bitfield */
11632 bc_load_bit_field (offset
, size
, unsignedp
)
11633 int offset
, size
, unsignedp
;
11635 /* Push bitfield offset and size */
11636 bc_push_offset_and_size (offset
, size
);
11638 /* Load: sign-extend if signed, else zero-extend */
11639 bc_emit_instruction (unsignedp
? zxloadBI
: sxloadBI
);
11643 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11644 (adjust stack pointer upwards), negative means add that number of
11645 levels (adjust the stack pointer downwards). Only positive values
11646 normally make sense. */
11649 bc_adjust_stack (nlevels
)
11658 bc_emit_instruction (drop
);
11661 bc_emit_instruction (drop
);
11666 bc_emit_instruction (adjstackSI
, (HOST_WIDE_INT
) nlevels
);
11667 stack_depth
-= nlevels
;
11670 #if defined (VALIDATE_STACK_FOR_BC)
11671 VALIDATE_STACK_FOR_BC ();