/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
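/* For instance (illustrative arithmetic, not part of the compiler):
   CEIL (11, 4) is (11 + 3) / 4 == 3, i.e. 11 bytes occupy 3 four-byte
   units, whereas plain integer division 11 / 4 would give 2.  */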
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
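/* Example (hypothetical target values): with STACK_BOUNDARY == 64 and
   BITS_PER_UNIT == 8, STACK_BYTES is 8, so stack adjustments are made
   in multiples of 8 bytes.  */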
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

static rtx get_push_address	PROTO ((int));
extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
void bc_expand_increment	PROTO((struct increment_operator *, tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx,
					 rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode]
      = mode_to_load_map[mode]
      = mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    int num_clobbers;

	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
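/* Illustrative sketch of the intended calling discipline (VAR, TARGET
   and the increment body here are hypothetical, not from this file):

     rtx inc = gen_rtx (SET, VOIDmode, var,
			gen_rtx (PLUS, GET_MODE (var), var, const1_rtx));
     rtx q = enqueue_insn (var, inc);		   /- increment VAR later -/
     rtx old = protect_from_queue (q, 0);	   /- pre-increment value -/
     emit_insn (gen_move_insn (target, old));	   /- use it immediately -/
     emit_queue ();		       /- queued increment is emitted here -/

   As the comment above warns, OLD must go into an insn before the queue
   is flushed.  */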
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
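/* A worked example of the dispatch above (modes are illustrative):
   convert_move (to, from, 1) with FROM in QImode and TO in SImode is an
   extension, so it tries can_extend_p (SImode, QImode, 1) and emits a
   zero_extendqisi2 if the target has one; with the operands reversed it
   is a truncation, handled by gen_lowpart or a truncsiqi2 pattern.  */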
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
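/* Worked example of the CONST_INT widening above (values illustrative):
   converting x = GEN_INT (-1) from QImode to SImode with unsignedp == 0
   first masks: val &= (1 << 8) - 1, giving 0xff; since bit 7 is set it
   sign extends: val |= -1 << 8, yielding -1 again.  With unsignedp == 1
   the mask alone is the answer and GEN_INT (0xff) is returned.  */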
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
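/* Worked count (illustrative machine: MOVE_MAX == 4, full alignment):
   for l == 11 the loop first uses SImode, 11 / 4 = 2 insns with 3 bytes
   left, then HImode for 1 insn with 1 byte left, then QImode for 1 insn;
   move_by_pieces_ninsns (11, 4) is therefore 4, which is compared
   against MOVE_RATIO in emit_block_move below.  */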
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx (MEM, mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
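/* For a forward copy with explicit post-increment (sketch; register
   numbers hypothetical), each iteration therefore emits a pattern of
   the shape

     (set (mem:SI (reg 100)) (mem:SI (reg 101)))
     (set (reg 100) (plus (reg 100) (const_int 4)))
     (set (reg 101) (plus (reg 101) (const_int 4)))

   until fewer than GET_MODE_SIZE (SImode) bytes remain.  */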
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx (REG, mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
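/* The PARALLEL destination looks like this (sketch; modes, registers
   and offsets are illustrative):

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:DF 32) (const_int 8))])

   meaning: load the word at byte offset 0 of Y into reg 3 and the
   DFmode value at offset 8 into reg 32.  A null first entry means part
   of the value also lives on the stack.  */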
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  retval
	    = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
				       ptr_mode, 3,
				       XEXP (object, 0), Pmode,
				       const0_rtx,
				       TYPE_MODE (integer_type_node),
				       convert_to_mode
				       (TYPE_MODE (sizetype), size,
					TREE_UNSIGNED (sizetype)),
				       TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode
			     (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
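/* Typical use (sketch; OBJECT and the length are hypothetical):

     clear_storage (object, GEN_INT (24), 4);

   zeros a 24-byte BLKmode object: 6 SImode stores of const0_rtx via
   clear_by_pieces, since 6 < MOVE_RATIO; otherwise a clrstr pattern or
   the memset/bzero library call is used.  */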
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      if (stack)
	{
	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
2314 /* This will handle any multi-word mode that lacks a move_insn pattern.
2315 However, you will get better code if you define such patterns,
2316 even if they must turn into multiple assembler instructions. */
2317 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2322 #ifdef PUSH_ROUNDING
2324 /* If X is a push on the stack, do the push now and replace
2325 X with a reference to the stack pointer. */
2326 if (push_operand (x
, GET_MODE (x
)))
2328 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2329 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2333 /* Show the output dies here. */
2335 emit_insn (gen_rtx (CLOBBER
, VOIDmode
, x
));
2338 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2341 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2342 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2344 /* If we can't get a part of Y, put Y into memory if it is a
2345 constant. Otherwise, force it into a register. If we still
2346 can't get a part of Y, abort. */
2347 if (ypart
== 0 && CONSTANT_P (y
))
2349 y
= force_const_mem (mode
, y
);
2350 ypart
= operand_subword (y
, i
, 1, mode
);
2352 else if (ypart
== 0)
2353 ypart
= operand_subword_force (y
, i
, mode
);
2355 if (xpart
== 0 || ypart
== 0)
2358 last_insn
= emit_move_insn (xpart
, ypart
);
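/* Illustrative sketch, not part of the compiler proper: the word loop
   above reduces, e.g., a DImode move on a 32-bit target to two
   word-sized moves like these.  */
#if 0
static void
example_multiword_move (x, y)
     rtx x, y;			/* both DImode; UNITS_PER_WORD == 4 */
{
  emit_move_insn (operand_subword (x, 0, 1, DImode),
		  operand_subword_force (y, 0, DImode));
  emit_move_insn (operand_subword (x, 1, 1, DImode),
		  operand_subword_force (y, 1, DImode));
}
#endif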
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
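/* Illustrative sketch, not part of the compiler proper: reserve an
   8-byte block with 4 bytes of padding at low addresses and obtain
   the address of its beginning.  The sizes are hypothetical.  */
#if 0
static rtx
example_push_block ()
{
  return push_block (GEN_INT (8), 4, 1);
}
#endif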
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return force_operand (temp, NULL_RTX);
}
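/* Illustrative sketch, not part of the compiler proper: with a
   POST_DEC push on a downward-growing stack the stack pointer has
   already moved past the data, so the beginning of a 4-byte block is
   sp + 4; with a pre-update push code it is the stack pointer itself.  */
#if 0
static rtx
example_push_address ()
{
  return get_push_address (4);
}
#endif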
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL (size) - used);
	      if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr,
							   args_so_far),
						  skip));
	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx target;

	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						  args_so_far));
	  target = addr;
	}

      emit_move_insn (gen_rtx (MEM, mode, addr), x);

      if (flag_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       XEXP (x, 0), ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x);
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
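/* Illustrative sketch, not part of the compiler proper: pushing one
   SImode scalar argument on a machine with real push insns
   (ARGS_ADDR == 0), with no partial-register words and no padding.
   The operand values are hypothetical.  */
#if 0
static void
example_push_arg (x)
     rtx x;			/* an SImode argument value */
{
  emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
		  PARM_BOUNDARY / BITS_PER_UNIT, 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx);
}
#endif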
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  if (output_bytecode)
    {
      tree dest_innermost;

      bc_expand_expr (from);
      bc_emit_instruction (duplicate);

      dest_innermost = bc_expand_address (to);

      /* Can't deduce from TYPE that we're dealing with a bitfield, so
	 take care of it here.  */

      bc_store_memory (TREE_TYPE (to), dest_innermost);
      return NULL;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					    force_reg (ptr_mode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = copy_rtx (to_rtx);

	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (offset == 0)
	    to_rtx = copy_rtx (to_rtx);

	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      /* Check the access.  */
      if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
	{
	  rtx to_addr;
	  int size;
	  int best_mode_size;
	  enum machine_mode best_mode;

	  best_mode = get_best_mode (bitsize, bitpos,
				     TYPE_ALIGN (TREE_TYPE (tem)),
				     mode1, volatilep);
	  if (best_mode == VOIDmode)
	    best_mode = QImode;

	  best_mode_size = GET_MODE_BITSIZE (best_mode);
	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
	  size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			     to_addr, ptr_mode,
			     GEN_INT (size), TYPE_MODE (sizetype),
			     GEN_INT (MEMORY_USE_WO),
			     TYPE_MODE (integer_type_node));
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value);
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (flag_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (to_rtx, 0), ptr_mode,
			   XEXP (from_rtx, 0), ptr_mode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
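/* Illustrative sketch, not part of the compiler proper: expanding the
   statement `v = v + 1' for an int variable whose tree is DECL.
   WANT_VALUE is 0 because the statement's value is unused.  */
#if 0
static void
example_assignment (decl)
     tree decl;
{
  tree rhs = build (PLUS_EXPR, integer_type_node, decl, integer_one_node);

  expand_assignment (decl, rhs, 0, 0);
}
#endif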
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (flag_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   XEXP (temp, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp),
						    0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
		  /* Be sure we can write on ADDR.  */
		  if (flag_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				       addr, ptr_mode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, ptr_mode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
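/* Illustrative sketch, not part of the compiler proper: computing EXP
   into TARGET while keeping a value that is safe to use in a containing
   expression (WANT_VALUE != 0).  The result may be TARGET itself, a
   pseudo copied from it, or a constant.  */
#if 0
static rtx
example_store_expr (exp, target)
     tree exp;
     rtx target;
{
  return store_expr (exp, target, 1);
}
#endif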
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
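/* For example, the initializer { 0, 0, 0, 5 } has zeros == 3 and
   elts == 4, so 4 * 3 >= 3 * 4 holds and mostly_zeros_p returns 1,
   while { 0, 1, 2, 3 } has only one zero element and returns 0.  */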
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type));
}
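/* For instance, a nested CONSTRUCTOR for a field at BITPOS == 32 in a
   MEM target recurses into store_constructor on the equivalent of
   (mem (plus addr 4)), so an already-cleared outer structure need not
   be cleared again for the substructure.  */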
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
		!= list_length (TYPE_FIELDS (type)))
	       || mostly_zeros_p (exp))
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);

	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					   force_reg (ptr_mode, offset_rtx)));
	    }
	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos,
				   mode, TREE_VALUE (elt), type, cleared);
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = 0;
	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (count < maxelt - minelt + 1
	      || 4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST
		  && (lo = TREE_INT_CST_LOW (lo_index),
		      hi = TREE_INT_CST_LOW (hi_index),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			   <= 40 * 8))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
		      store_constructor_field (target, bitsize, bitpos,
					       mode, value, type, cleared);
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					    TREE_TYPE (index),
					    index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx (USE, GET_MODE (target), target));
		}
	    }
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      rtx xtarget = XEXP (target, 0);
      int set_word_size = TYPE_ALIGN (type);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset), and
	 then set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;  /* In bytes from beginning of set.  */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			{
			  to_rtx = plus_constant (XEXP (target, 0), offset);
			  to_rtx = change_address (target, mode, to_rtx);
			}
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (! cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* start of range of element or NULL */
	  tree startbit = TREE_PURPOSE (elt);
	  /* end of range of element, or element value */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* handle non-range tuple element like [ expr ]  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    {
	      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
				 bitlength_rtx, TYPE_MODE (sizetype),
				 startbit_rtx, TYPE_MODE (sizetype),
				 endbit_rtx, TYPE_MODE (sizetype));
	    }
	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
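/* Illustrative sketch, not part of the compiler proper: a SET_TYPE
   CONSTRUCTOR such as the Pascal-style set [1..3] reaches the code
   above, which either assembles the constant words directly or falls
   back on memset/__setbits for non-constant ranges.  */
#if 0
static void
example_store_set (ctor, target)
     tree ctor;			/* CONSTRUCTOR of a SET_TYPE */
     rtx target;		/* MEM wide enough for the whole set */
{
  store_constructor (ctor, target, 0);
}
#endif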
4121 /* Store the value of EXP (an expression tree)
4122 into a subfield of TARGET which has mode MODE and occupies
4123 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4124 If MODE is VOIDmode, it means that we are storing into a bit-field.
4126 If VALUE_MODE is VOIDmode, return nothing in particular.
4127 UNSIGNEDP is not used in this case.
4129 Otherwise, return an rtx for the value stored. This rtx
4130 has mode VALUE_MODE if that is convenient to do.
4131 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4133 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4134 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4137 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
4138 unsignedp
, align
, total_size
)
4140 int bitsize
, bitpos
;
4141 enum machine_mode mode
;
4143 enum machine_mode value_mode
;
4148 HOST_WIDE_INT width_mask
= 0;
4150 if (TREE_CODE (exp
) == ERROR_MARK
)
4153 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
4154 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
4156 /* If we are storing into an unaligned field of an aligned union that is
4157 in a register, we may have the mode of TARGET being an integer mode but
4158 MODE == BLKmode. In that case, get an aligned object whose size and
4159 alignment are the same as TARGET and store TARGET into it (we can avoid
4160 the store if the field being stored is the entire width of TARGET). Then
4161 call ourselves recursively to store the field into a BLKmode version of
4162 that object. Finally, load from the object into TARGET. This is not
4163 very efficient in general, but should only be slightly more expensive
4164 than the otherwise-required unaligned accesses. Perhaps this can be
4165 cleaned up later. */
4168 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
4170 rtx object
= assign_stack_temp (GET_MODE (target
),
4171 GET_MODE_SIZE (GET_MODE (target
)), 0);
4172 rtx blk_object
= copy_rtx (object
);
4174 MEM_IN_STRUCT_P (object
) = 1;
4175 MEM_IN_STRUCT_P (blk_object
) = 1;
4176 PUT_MODE (blk_object
, BLKmode
);
4178 if (bitsize
!= GET_MODE_BITSIZE (GET_MODE (target
)))
4179 emit_move_insn (object
, target
);
4181 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
4184 /* Even though we aren't returning target, we need to
4185 give it the updated value. */
4186 emit_move_insn (target
, object
);
4191 /* If the structure is in a register or if the component
4192 is a bit field, we cannot use addressing to access it.
4193 Use bit-field techniques or SUBREG to store in it. */
4195 if (mode
== VOIDmode
4196 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
])
4197 || GET_CODE (target
) == REG
4198 || GET_CODE (target
) == SUBREG
4199 /* If the field isn't aligned enough to store as an ordinary memref,
4200 store it as a bit field. */
4201 || (SLOW_UNALIGNED_ACCESS
4202 && align
* BITS_PER_UNIT
< GET_MODE_ALIGNMENT (mode
))
4203 || (SLOW_UNALIGNED_ACCESS
&& bitpos
% GET_MODE_ALIGNMENT (mode
) != 0))
4205 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4207 /* If BITSIZE is narrower than the size of the type of EXP
4208 we will be narrowing TEMP. Normally, what's wanted are the
4209 low-order bits. However, if EXP's type is a record and this is
4210 big-endian machine, we want the upper BITSIZE bits. */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
					 plus_constant (addr,
							(bitpos
							 / BITS_PER_UNIT))));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
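
/* Illustrative sketch (assumed example, not from the original source):
   for a reference such as S.F, where F is a 10-bit field starting 70 bits
   into S, a call of the form

       tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep,
					 &alignment);

   would return the tree for S with *PBITSIZE == 10, *PBITPOS == 70,
   *POFFSET == 0, and *PMODE == VOIDmode (a bit-field).  The field layout
   is invented for the example.  */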
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  index = fold (build (MULT_EXPR, index_type, index,
			       convert (index_type,
					TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    {
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, index,
					       size_int (BITS_PER_UNIT)));

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype, offset, exp);
	    }
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;

    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;

    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;

    case EXPAND_INITIALIZER:
    case EXPAND_MEMORY_USE_DONT:
    case EXPAND_CONST_ADDRESS:
      return MEMORY_USE_DONT;

    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
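
/* Illustrative sketch (assumed RTL, not from the original source): given
   the composite address

       (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   force_operand emits a multiply and an add and returns the pseudo
   holding the sum, e.g. (reg:SI 102).  The register numbers are invented
   for the example.  */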
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     force_operand.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
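      /* Illustrative sketch (assumed RTL): for

	     (plus (plus (reg virtual-stack-vars) (reg:SI 103)) (const_int 8))

	 the code below first forms virtual-stack-vars + 8, so that
	 instantiation can fold the 8 into the final frame offset, and only
	 then adds (reg:SI 103).  The particular register and offset are
	 made up.  */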
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);

	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);

	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */
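
/* Illustrative sketch (assumed example): when expanding an assignment
   such as A = B + C directly into A's rtx, the expander can ask

       if (safe_from_p (DECL_RTL (a), rhs))
	 ... expand RHS with DECL_RTL (a) as the target ...

   and must fall back to a temporary when this returns zero, e.g. when the
   right-hand side references A itself.  The variable names are invented
   for the example.  */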
static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode.  */
      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0)));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0));

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));
	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();
	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
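
/* Illustrative sketch (assumed example): a typical caller that wants the
   value of an expression in any convenient place might write

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   while an address computation that may profitably stay in the
   (PLUS ...) form described above would pass EXPAND_SUM instead.  */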
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  /* Maybe defer this until sure not doing bytecode?  */
  int ignore = (target == const0_rtx
		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
		     || code == COND_EXPR)
		    && TREE_CODE (type) == VOID_TYPE));
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  if (output_bytecode && modifier != EXPAND_INITIALIZER)
    {
      bc_expand_expr (exp);
      return NULL;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);

      target = 0;
    }

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
					label_rtx (exp), p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
				   label_rtx (exp), forced_labels);
	temp = gen_rtx (MEM, FUNCTION_MODE,
			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Only check automatic variables.  Currently, function arguments are
	 not checked (this can be done at compile-time with prototypes).
	 Aggregates are not checked.  */
      if (flag_check_memory_usage && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && DECL_CONTEXT (exp) != NULL_TREE
	  && ! TREE_STATIC (exp)
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), ptr_mode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx (MEM, Pmode,
			    fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));
      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}
      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 0, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
				      save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in its references is of type TYPE.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* See if the object is the type that we want.  */
	    if ((TYPE_MAIN_VARIANT (TREE_TYPE
				    (TREE_PURPOSE (placeholder_expr)))
		 == need_type))
	      object = TREE_PURPOSE (placeholder_expr);

	    /* Find the outermost reference that is of the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0
		 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		     || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
		 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
			 || TREE_CODE (elt) == COND_EXPR)
			? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
	      if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
		      == need_type))
		object = TREE_OPERAND (elt, 0);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (move_by_pieces_ninsns
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT)
			    > MOVE_RATIO)
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, 0);
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				 op0, ptr_mode,
				 GEN_INT (int_size_in_bytes (type)),
				 TYPE_MODE (sizetype),
				 GEN_INT (memory_usage),
				 TYPE_MODE (integer_type_node));
	  }

	temp = gen_rtx (MEM, mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	    || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_IN_STRUCT_P (temp) = 1;
	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	int i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, ro_modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && TREE_INT_CST_HIGH (index) == 0
			 && (TREE_INT_CST_LOW (index)
			     < TREE_STRING_LENGTH (init)))
		  return GEN_INT
		    (TREE_STRING_POINTER
		     (init)[TREE_INT_CST_LOW (index)]);
	      }
	  }
      }

      /* ... fall through ...  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}

      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   modifier == EXPAND_INITIALIZER ? modifier : 0);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
#ifdef POINTERS_EXTEND_UNSIGNED
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
#else
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
					   force_reg (ptr_mode, offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (flag_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				     to, ptr_mode,
				     GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
	      }
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode)
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 1);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_IN_STRUCT_P (op0) = 1;
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
						    (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));
	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_IN_STRUCT_P (op0) = 1;
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low % bits_per_word);
	       the_word  = set [ (index - rlo) / bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
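
	/* Illustrative C sketch of the algorithm above (assumed helper;
	   BITS_PER_UNIT plays the role of bits_per_word in the expansion
	   below):

	       int in_set (unsigned char *set, int set_low, int index)
	       {
		 int rlo = set_low - (set_low % BITS_PER_UNIT);
		 unsigned char the_word = set[(index - rlo) / BITS_PER_UNIT];
		 int bit_index = index % BITS_PER_UNIT;
		 return (the_word >> bit_index) & 1;
	       }

	   The range checks performed by the emitted RTL are omitted from
	   this sketch.  */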
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	preexpand_calls (exp);

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_blt (op1));
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_bgt (op1));
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx (MEM, byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	extern int temp_slot_level;
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (0);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (target == 0)
	    {
	      if (mode != BLKmode)
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1,
			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
6148 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == PLUS_EXPR
6149 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1)) == INTEGER_CST
6150 && TREE_CODE (TREE_OPERAND (exp
, 1)) == RTL_EXPR
6151 && (RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == frame_pointer_rtx
6152 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == stack_pointer_rtx
6153 || RTL_EXPR_RTL (TREE_OPERAND (exp
, 1)) == arg_pointer_rtx
))
6155 tree t
= TREE_OPERAND (exp
, 1);
6157 TREE_OPERAND (exp
, 1) = TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6158 TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) = t
;
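      /* Added illustration (not in the original source): a sketch of the
	 swap above.  Given the tree (PLUS (PLUS x 4) fp), where fp is an
	 RTL_EXPR holding frame_pointer_rtx, the operands are rearranged so
	 that we effectively expand (PLUS (PLUS fp 4) x) instead; the
	 register-plus-constant pair can then collapse into one address
	 once the frame pointer is eliminated.  */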
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 index.

	 If this is an EXPAND_SUM call, always return the sum.  */
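      /* Added illustration (not in the original source): for
	 `int arr[10]; int *p = &arr[3];' the initializer is expanded as
	 (plus (symbol_ref arr) (const_int 12)), which plus_constant folds
	 into a single constant address instead of a run-time addition.  */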
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
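      /* Added illustration (not in the original source): the
	 re-association above turns, e.g.,
	   op0 = (reg r1)
	   op1 = (plus (mult (reg r2) (const_int 4)) (const_int 8))
	 into
	   op0 = (plus (mult (reg r2) (const_int 4)) (reg r1))
	   op1 = (const_int 8)
	 so the final PLUS has the (base + index*scale) + displacement
	 shape that indexed addressing modes expect.  */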
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx (MINUS, mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  /* Deal with the case where we can't negate the constant
	     in TYPE.  */
	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    {
	      tree newtype = signed_type (type);
	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

	      if (! TREE_OVERFLOW (newneg))
		return expand_expr (convert (type,
					     build (PLUS_EXPR, newtype,
						    newop0, newneg)),
				    target, tmode, ro_modifier);
	    }
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;
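      /* Added illustration (not in the original source): for signed
	 `i - 5' the rewrite above yields `i + (-5)', which reuses all of
	 the PLUS_EXPR machinery (plus_constant, address formation).  For
	 unsigned types the negation is first performed in the matching
	 signed type, since negating 5 in an unsigned type would be
	 flagged as an overflow by fold.  */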
    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return gen_rtx (PLUS, mode,
			    gen_rtx (MULT, mode, XEXP (op0, 0),
				     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
				     * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx (MULT, mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
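      /* Added illustration (not in the original source): on a machine
	 with a widening SImode = HImode x HImode multiply pattern, the
	 source `short a, b; int p = (int) a * (int) b;' matches the test
	 below and is expanded with the widening-multiply optab, rather
	 than as two explicit sign extensions followed by a full SImode
	 multiply.  */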
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? smul_widen_optab : umul_widen_optab);
	  this_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? umul_widen_optab : smul_widen_optab);

	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;

		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0)));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	  emit_move_insn (target, op1);
	}
      else
	{
	  if (code == MAX_EXPR)
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	  else
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
	  if (temp == const0_rtx)
	    emit_move_insn (target, op1);
	  else if (temp != const_true_rtx)
	    {
	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
	      else
		abort ();
	      emit_move_insn (target, op1);
	    }
	}
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
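      /* Added illustration (not in the original source): for
	 `f () && g ()' the front end emits TRUTH_ANDIF_EXPR, so g() is
	 never called when f() yields 0; for a TRUTH_AND_EXPR such as
	 `(a > 0) & (b > 0)' both comparisons are computed as 0-or-1
	 values and combined with a single AND instruction, avoiding a
	 conditional branch.  */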
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
			 GET_MODE (temp), unsignedp, 0);
	  emit_jump_insn (gen_beq (op1));
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
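      /* Added illustration (not in the original source): for
	 `flag = (x != 0);' the code above emits roughly

	     temp = x;
	     if (temp == 0) goto L;
	     temp = 1;
	   L:

	 which avoids a store-flag instruction when the result register
	 can double as the comparison input.  */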
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}
      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */
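	/* Added illustration (not in the original source): in
	   `y = c ? x + 8 : x;' both arms share the operand `x', so
	   SINGLETON is `x' and BINARY_OP is `x + 8'; the expansion can
	   copy `x' to the target and conditionally add 8, instead of
	   evaluating both arms independently.  */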
	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
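	/* Added illustration (not in the original source): for
	   `y = (a < b) ? x + 4 : x;' a store-flag instruction computes
	   (a < b) as 0 or 1; shifting that left by log2(4) == 2 gives 0
	   or 4, and a single addition then produces the result with no
	   branch at all.  */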
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	do_pending_stack_adjust ();
	emit_label (op1);
	return ignore ? const0_rtx : temp;
      }
    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid out on the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* We have already expanded the slot, so don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		DECL_RTL (slot) = target;
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL
	    && ! (TREE_CODE (lhs) == INDIRECT_REF
		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
	  preexpand_calls (exp);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
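	/* Added illustration (not in the original source): for
	   `s.a |= s.b;' where both `a' and `b' are 1-bit fields, the code
	   below emits a conditional jump on `s.b' followed by a plain
	   store of 1 (for |=) or 0 (for &=), instead of the usual
	   read-modify-write sequence on the destination bitfield.  */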
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  op0 = protect_from_queue (op0, 0);

	  /* We would like the object in memory.  If it is a constant,
	     we can have it be statically allocated into memory.  For
	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
	     memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT)
	    {
	      /* If this object is in a register, it must not be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      mark_temp_addr_taken (memloc);
	      emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end (handler);

	return op0;
      }

    case POPDCC_EXPR:
      {
	rtx dcc = get_dynamic_cleanup_chain ();
	emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
	return const0_rtx;
      }

    case POPDHC_EXPR:
      {
	rtx dhc = get_dynamic_handler_chain ();
	emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
	return const0_rtx;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Emit bytecode to evaluate the given expression EXP to the stack.  */

void
bc_expand_expr (exp)
     tree exp;
{
  enum tree_code code;
  tree type;
  rtx r;
  struct binary_operator *binoptab;
  struct unary_operator *unoptab;
  struct increment_operator *incroptab;
  struct bc_label *lab, *lab1;
  enum bytecode_opcode opcode;

  code = TREE_CODE (exp);
  switch (code)
    {
    case PARM_DECL:

      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return;
	}

      bc_load_parmaddr (DECL_RTL (exp));
      bc_load_memory (TREE_TYPE (exp), exp);

      return;

    case VAR_DECL:

      if (DECL_RTL (exp) == 0)
	abort ();

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else
	bc_load_localaddr (DECL_RTL (exp));
#endif
      if (TREE_PUBLIC (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      bc_load_memory (TREE_TYPE (exp), exp);
      return;
    case INTEGER_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
      bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
						    ? SImode
						    : TYPE_MODE (TREE_TYPE (exp)))],
			   (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
      return;

    case REAL_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
#endif
      /* FIX THIS: find a better way to pass real_cst's. -bson  */
      bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
			   (double) TREE_REAL_CST (exp));
      return;
    case CALL_EXPR:
      {
	/* We build a call description vector describing the type of
	   the return value and of the arguments; this call vector,
	   together with a pointer to a location for the return value
	   and the base of the argument list, is passed to the low
	   level machine dependent call subroutine, which is responsible
	   for putting the arguments wherever real functions expect
	   them, as well as getting the return value back.  */
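	/* Added illustration (not in the original source): for a call
	   `f (x, y)' the calldesc built below records the argument count,
	   the return value's type code and size, and a type-code/size
	   pair for each argument; it is emitted as a static constant
	   array so the interpreter's call primitive can marshal the
	   stacked operand values without any per-call metadata.  */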
	tree calldesc = 0, arg;
	int nargs = 0, i;
	rtx retval;

	/* Push the evaluated args on the evaluation stack in reverse
	   order.  Also make an entry for each arg in the calldesc
	   vector while we're at it.  */

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
	  {
	    ++nargs;
	    bc_expand_expr (TREE_VALUE (arg));

	    calldesc = tree_cons ((tree) 0,
				  size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	    calldesc = tree_cons ((tree) 0,
				  bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	  }

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	/* Allocate a location for the return value and push its
	   address on the evaluation stack.  Also make an entry
	   at the front of the calldesc for the return value type.  */

	type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
	bc_load_localaddr (retval);

	calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
	calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);

	/* Prepend the argument count.  */
	calldesc = tree_cons ((tree) 0,
			      build_int_2 (nargs, 0),
			      calldesc);

	/* Push the address of the call description vector on the stack.  */
	calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
	TREE_TYPE (calldesc) = build_array_type (integer_type_node,
						 build_index_type (build_int_2 (nargs * 2, 0)));
	r = output_constant_def (calldesc);
	bc_load_externaddr (r);

	/* Push the address of the function to be called.  */
	bc_expand_expr (TREE_OPERAND (exp, 0));

	/* Call the function, popping its address and the calldesc vector
	   address off the evaluation stack in the process.  */
	bc_emit_instruction (call);

	/* Pop the arguments off the stack.  */
	bc_adjust_stack (nargs);

	/* Load the return value onto the stack.  */
	bc_load_localaddr (retval);
	bc_load_memory (type, TREE_OPERAND (exp, 0));
      }
      return;
))
7583 /* First time around: copy to local variable */
7584 SAVE_EXPR_RTL (exp
) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp
)),
7585 TYPE_ALIGN (TREE_TYPE(exp
)));
7586 bc_expand_expr (TREE_OPERAND (exp
, 0));
7587 bc_emit_instruction (duplicate
);
7589 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7590 bc_store_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
7594 /* Consecutive reference: use saved copy */
7595 bc_load_localaddr (SAVE_EXPR_RTL (exp
));
7596 bc_load_memory (TREE_TYPE (exp
), TREE_OPERAND (exp
, 0));
#if 0
      /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
	 how are they handled instead?  */
    case LET_STMT:

      TREE_USED (exp) = 1;
      bc_expand_expr (STMT_BODY (exp));
      return;
#endif

    case NOP_EXPR:
    case CONVERT_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
      return;

    case MODIFY_EXPR:

      expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
      return;

    case ADDR_EXPR:

      bc_expand_address (TREE_OPERAND (exp, 0));
      return;

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;

    case ARRAY_REF:

      bc_expand_expr (bc_canonicalize_array_ref (exp));
      return;

    case COMPONENT_REF:

      bc_expand_component_address (exp);

      /* If we have a bitfield, generate a proper load.  */
      bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
      return;

    case COMPOUND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_emit_instruction (drop);
      bc_expand_expr (TREE_OPERAND (exp, 1));
      return;
    case COND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();
      bc_emit_bytecode (xjumpifnot);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
      bc_expand_expr (TREE_OPERAND (exp, 1));
      lab1 = bc_get_bytecode_label ();
      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (lab1);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_bytecode_labeldef (lab);
      bc_expand_expr (TREE_OPERAND (exp, 2));
      bc_emit_bytecode_labeldef (lab1);
      return;

    case TRUTH_ANDIF_EXPR:

      opcode = xjumpifnot;
      goto andorif;

    case TRUTH_ORIF_EXPR:

      opcode = xjumpif;
      goto andorif;
= optab_plus_expr
;
7695 binoptab
= optab_minus_expr
;
7700 binoptab
= optab_mult_expr
;
7703 case TRUNC_DIV_EXPR
:
7704 case FLOOR_DIV_EXPR
:
7706 case ROUND_DIV_EXPR
:
7707 case EXACT_DIV_EXPR
:
7709 binoptab
= optab_trunc_div_expr
;
7712 case TRUNC_MOD_EXPR
:
7713 case FLOOR_MOD_EXPR
:
7715 case ROUND_MOD_EXPR
:
7717 binoptab
= optab_trunc_mod_expr
;
7720 case FIX_ROUND_EXPR
:
7721 case FIX_FLOOR_EXPR
:
7723 abort (); /* Not used for C. */
7725 case FIX_TRUNC_EXPR
:
7732 abort (); /* FIXME */
7736 binoptab
= optab_rdiv_expr
;
7741 binoptab
= optab_bit_and_expr
;
7746 binoptab
= optab_bit_ior_expr
;
7751 binoptab
= optab_bit_xor_expr
;
7756 binoptab
= optab_lshift_expr
;
7761 binoptab
= optab_rshift_expr
;
7764 case TRUTH_AND_EXPR
:
7766 binoptab
= optab_truth_and_expr
;
7771 binoptab
= optab_truth_or_expr
;
7776 binoptab
= optab_lt_expr
;
7781 binoptab
= optab_le_expr
;
7786 binoptab
= optab_ge_expr
;
7791 binoptab
= optab_gt_expr
;
7796 binoptab
= optab_eq_expr
;
7801 binoptab
= optab_ne_expr
;
7806 unoptab
= optab_negate_expr
;
7811 unoptab
= optab_bit_not_expr
;
7814 case TRUTH_NOT_EXPR
:
7816 unoptab
= optab_truth_not_expr
;
7819 case PREDECREMENT_EXPR
:
7821 incroptab
= optab_predecrement_expr
;
7824 case PREINCREMENT_EXPR
:
7826 incroptab
= optab_preincrement_expr
;
7829 case POSTDECREMENT_EXPR
:
7831 incroptab
= optab_postdecrement_expr
;
7834 case POSTINCREMENT_EXPR
:
7836 incroptab
= optab_postincrement_expr
;
7841 bc_expand_constructor (exp
);
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output.  */
	if (TREE_OPERAND (exp, 2) != 0)
	  TREE_USED (TREE_OPERAND (exp, 2)) = 1;

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	bc_expand_expr (TREE_OPERAND (exp, 1));

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
      }
      return;
    binop:

      bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
      return;

    unop:

      bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;

    andorif:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();

      bc_emit_instruction (duplicate);
      bc_emit_bytecode (opcode);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_instruction (drop);

      bc_expand_expr (TREE_OPERAND (exp, 1));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
      bc_emit_bytecode_labeldef (lab);
      return;

    increment:

      type = TREE_TYPE (TREE_OPERAND (exp, 0));

      /* Push the quantum.  */
      bc_expand_expr (TREE_OPERAND (exp, 1));

      /* Convert it to the lvalue's type.  */
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);

      /* Push the address of the lvalue.  */
      bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			      TREE_OPERAND (exp, 0)));

      /* Perform the actual increment.  */
      bc_expand_increment (incroptab, type);
      return;

    default:
      abort ();
    }
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */
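/* Added illustration (not in the original source): given
   `int x; ... get_pointer_alignment (&x, BIGGEST_ALIGNMENT)', the
   ADDR_EXPR arm below inspects the declaration and returns
   DECL_ALIGN (x) (32 on most targets), letting builtins such as memcpy
   pick a wider move mode than the plain `char *' type alone would
   justify.  */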
static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
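/* Added illustration (not in the original source): c_strlen applied to
   the literal "abc" folds to size_int (3) at compile time, so a strlen
   of a string constant costs nothing at run time; applied to an unknown
   `char *p' it returns 0 and the caller falls back to calling the
   library strlen.  */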
static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx (MEM, Pmode, tem);
#endif
  return tem;
}
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */
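/* Added illustration (not in the original source): a conforming use,
   with `buf' sized as the comment above requires:

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
	 ...	direct path: the builtin returns 0
       else
	 ...	resumed here by the matching __builtin_longjmp (buf, 1)
*/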
rtx
expand_builtin_setjmp (buf_addr, target)
     rtx buf_addr;
     rtx target;
{
  rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
  enum machine_mode sa_mode = Pmode, value_mode;
  rtx stack_save;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int return_pops
    = RETURN_POPS_ARGS (get_identifier ("__dummy"),
			build_function_type (void_type_node, NULL_TREE),
			0);
  rtx next_arg_reg;
  CUMULATIVE_ARGS args_so_far;
  rtx op0;
  int i;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */
  emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
		  virtual_stack_vars_rtx);
  emit_move_insn
    (validize_mem (gen_rtx (MEM, Pmode,
			    plus_constant (buf_addr,
					   GET_MODE_SIZE (Pmode)))),
     gen_rtx (LABEL_REF, Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

  stack_save = gen_rtx (MEM, sa_mode,
			plus_constant (buf_addr,
				       2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  /* Set TARGET to zero and branch around the other case.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (lab2));
  emit_barrier ();
  emit_label (lab1);

  /* Note that setjmp clobbers FP when we get here, so we have to make
     sure it's marked as used by this function.  */
  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

  current_function_has_nonlocal_label = 1;

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't been space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* The static chain pointer contains the address of the dummy function.
     We need to call it here to handle some PIC cases of restoring a
     global pointer.  Then return 1.  */
  op0 = copy_to_mode_reg (Pmode, static_chain_rtx);

  /* We can't actually call emit_library_call here, so do everything
     it does, which isn't much for a libfunc with no args.  */
  op0 = memory_address (FUNCTION_MODE, op0);

  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
			gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
  next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);

#ifndef ACCUMULATE_OUTGOING_ARGS
#ifdef HAVE_call_pop
  if (HAVE_call_pop)
    emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
				  const0_rtx, next_arg_reg,
				  GEN_INT (return_pops)));
  else
#endif
#endif
    emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
			      const0_rtx, next_arg_reg, const0_rtx));

  inhibit_defer_pop = old_inhibit_defer_pop;
  emit_move_insn (target, const1_rtx);
  emit_label (lab2);
  return target;
}
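
/* Illustrative sketch (not part of the compiler): how this builtin and
   its longjmp counterpart below are meant to be used.  The buffer
   layout assumed by the code above is: word 0 = frame pointer, word 1 =
   resume label, words 2..4 = machine-dependent stack save area.

       static void *buf[5];

       int try_it (void)
       {
	 if (__builtin_setjmp (buf))
	   return 1;			(resumed via __builtin_longjmp)
	 __builtin_longjmp (buf, 1);	(second argument is a dummy)
       }
*/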
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
	  emit_jump_insn (gen_beq (lab1));

#ifdef TARGET_EDOM
	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }
#else
	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
#endif

	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
      /* __builtin_apply_args returns block of memory allocated on
	 the stack into which is stored the arg pointer, structure
	 value address, static chain, and all the registers that might
	 possibly be used in performing a function call.  The code is
	 moved to the start of the function so the incoming values are
	 saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}
      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;
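
      /* Illustrative sketch (not part of the compiler): forwarding a
	 call with the three builtins handled above.  Hypothetical user
	 code; the 64 is a guessed worst-case ARGSIZE, matching the ???
	 comment above, since there is no portable way to compute it:

	     double target_fn ();

	     double forwarder ()
	     {
	       void *args = __builtin_apply_args ();
	       void *ret
		 = __builtin_apply ((void (*) ()) target_fn, args, 64);
	       __builtin_return (ret);
	     }
      */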
    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();

	return temp;
      }
      /* __builtin_args_info (N) returns word N of the arg space info
	 for the current function.  The number and meanings of words
	 is controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int i;
	int *word_ptr = (int *) &current_function_args_info;
	tree type, elts, result;

	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
      }
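
      /* Illustrative sketch (not part of the compiler): the argument
	 must be an integer constant expression selecting one word of
	 the target's CUMULATIVE_ARGS structure, e.g.

	     int first_arg_info_word (void)
	     {
	       return __builtin_args_info (0);
	     }

	 What each word means is entirely target-defined.  */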
      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.
	       We must also strip off INDIRECT_EXPR for C++ reference
	       parameters.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR
		   || TREE_CODE (arg) == INDIRECT_REF)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
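
      /* Illustrative sketch (not part of the compiler): <stdarg.h> on
	 many targets defines va_start in terms of this builtin,
	 roughly as

	     #define va_start(AP, LASTARG) \
	       ((AP) = (char *) __builtin_next_arg (LASTARG))

	 which is why the code above validates that LASTARG is the last
	 named parameter.  */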
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);
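
      /* Illustrative sketch (not part of the compiler): user code
	 receives one of the *_type_class values from typeclass.h, e.g.

	     int k = __builtin_classify_type (3.14);

	 yields real_type_class here; type-generic macros in some C
	 libraries branch on this result.  */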
    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	{
	  tree arg = TREE_VALUE (arglist);

	  STRIP_NOPS (arg);
	  return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
		  || (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
		  ? const1_rtx : const0_rtx);
	}
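
      /* Illustrative sketch (not part of the compiler): only literal
	 constants and string addresses are recognized here, so

	     __builtin_constant_p (3 + 4)	is 1 (folded first),
	     __builtin_constant_p ("abc")	is 1,
	     __builtin_constant_p (some_var)	is 0.  */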
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the return address saved in that frame.  */
      if (arglist == 0)
	/* Warning about missing arg was already issued.  */
	return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    error ("invalid arg to `__builtin_frame_address'");
	  else
	    error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else
	{
	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
						hard_frame_pointer_rtx);

	  /* Some ports cannot access arbitrary stack frames.  */
	  if (tem == NULL)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
		warning ("unsupported arg to `__builtin_frame_address'");
	      else
		warning ("unsupported arg to `__builtin_return_address'");
	      return const0_rtx;
	    }

	  /* For __builtin_frame_address, return what we've got.  */
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    return tem;

	  if (GET_CODE (tem) != REG)
	    tem = copy_to_reg (tem);
	  return tem;
	}
      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
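
      /* Illustrative sketch (not part of the compiler): a call the
	 case above expands inline, e.g.

	     int use_scratch (int n)
	     {
	       char *buf = __builtin_alloca (n);
	       buf[0] = 0;
	       return buf[0];
	     }

	 The space is part of the caller's frame and is freed on
	 return, which is why the expansion is just a stack-pointer
	 adjustment.  */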
    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  tree src = TREE_VALUE (arglist);
	  tree len = c_strlen (src);

	  int align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

	  rtx result, src_rtx, char_rtx;
	  enum machine_mode insn_mode = value_mode, char_mode;
	  enum insn_code icode;

	  /* If the length is known, just return it.  */
	  if (len != 0)
	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.  */

	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);
	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, ptr_mode,
						 EXPAND_NORMAL));

	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  /* Check the string is readable and has an end.  */
	  if (flag_check_memory_usage)
	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			       src_rtx, ptr_mode,
			       GEN_INT (MEMORY_USE_RO),
			       TYPE_MODE (integer_type_node));

	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int)icode][2];
	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx (MEM, BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));

	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}
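
      /* Illustrative sketch (not part of the compiler): when the
	 argument is a literal, c_strlen computes the length and the
	 call folds away, so

	     int n = __builtin_strlen ("hello");

	 compiles as if it were `int n = 5;', with no library call.
	 Only when the length is unknown is the target's strlen insn
	 pattern (or a real call) used.  */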
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}

      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != POINTER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
	      != INTEGER_TYPE))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  tree type;

	  int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;

	  /* If either SRC or DEST is not a pointer type, don't do
	     this operation in-line.  */
	  if (src_align == 0 || dest_align == 0)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	      break;
	    }

	  dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
	  dest_mem = gen_rtx (MEM, BLKmode,
			      memory_address (BLKmode, dest_rtx));
	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (dest) == NOP_EXPR)
	    dest = TREE_OPERAND (dest, 0);
	  type = TREE_TYPE (TREE_TYPE (dest));
	  MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
	  src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
	  src_mem = gen_rtx (MEM, BLKmode,
			     memory_address (BLKmode, src_rtx));
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

	  /* Just copy the rights of SRC to the rights of DEST.  */
	  if (flag_check_memory_usage)
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       dest_rtx, ptr_mode,
			       src_rtx, ptr_mode,
			       len_rtx, TYPE_MODE (sizetype));

	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (src) == NOP_EXPR)
	    src = TREE_OPERAND (src, 0);
	  type = TREE_TYPE (TREE_TYPE (src));
	  MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);

	  /* Copy word part most expediently.  */
	  dest_addr
	    = emit_block_move (dest_mem, src_mem, len_rtx,
			       MIN (src_align, dest_align));

	  if (dest_addr == 0)
	    dest_addr = force_operand (dest_rtx, NULL_RTX);

	  return dest_addr;
	}
    case BUILT_IN_MEMSET:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != INTEGER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (INTEGER_TYPE
	      != (TREE_CODE (TREE_TYPE
			     (TREE_VALUE
			      (TREE_CHAIN (TREE_CHAIN (arglist))))))))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  tree type;

	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_rtx, dest_mem, dest_addr, len_rtx;

	  /* If DEST is not a pointer type, don't do this
	     operation in-line.  */
	  if (dest_align == 0)
	    break;

	  /* If VAL is not 0, don't do this operation in-line.  */
	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
	    break;

	  /* If LEN does not expand to a constant, don't do this
	     operation in-line.  */
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
	  if (GET_CODE (len_rtx) != CONST_INT)
	    break;

	  dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
	  dest_mem = gen_rtx (MEM, BLKmode,
			      memory_address (BLKmode, dest_rtx));

	  /* Just check DST is writable and mark it as readable.  */
	  if (flag_check_memory_usage)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       dest_rtx, ptr_mode,
			       len_rtx, TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));

	  /* There could be a void* cast on top of the object.  */
	  while (TREE_CODE (dest) == NOP_EXPR)
	    dest = TREE_OPERAND (dest, 0);
	  type = TREE_TYPE (TREE_TYPE (dest));
	  MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);

	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

	  if (dest_addr == 0)
	    dest_addr = force_operand (dest_rtx, NULL_RTX);

	  return dest_addr;
	}
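
      /* Illustrative sketch (not part of the compiler): the in-line
	 case above only fires for a zero value and a constant length,
	 so

	     struct S { int a, b; } s;
	     __builtin_memset (&s, 0, sizeof s);

	 becomes a clear_storage of the whole struct (a couple of word
	 stores on a typical 32-bit target), while memset (&s, 1, n)
	 remains a library call.  */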
/* These comparison functions need an instruction that returns an actual
   index.  An ordinary compare that just sets the condition codes
   is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (flag_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len, len2;

	len = c_strlen (arg1);
	if (len)
	  len = size_binop (PLUS_EXPR, integer_one_node, len);
	len2 = c_strlen (arg2);
	if (len2)
	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

	/* If we don't have a constant length for the first, use the length
	   of the second, if we know it.  We don't require a constant for
	   this case; some cost analysis could be done if both are available
	   but neither is constant.  For now, assume they're equally cheap.

	   If both strings have constant lengths, use the smaller.  This
	   could arise if optimization results in strcpy being called with
	   two fixed strings, or if the code was machine-generated.  We should
	   add some code to the `memcmp' handler below to deal with such
	   situations, someday.  */
	if (!len || TREE_CODE (len) != INTEGER_CST)
	  {
	    if (len2)
	      len = len2;
	    else if (len == 0)
	      break;
	  }
	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
	  {
	    if (tree_int_cst_lt (len2, len))
	      len = len2;
	  }

	chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (flag_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	rtx result;

	int arg1_align
	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	int arg2_align
	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	enum machine_mode insn_mode
	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

	/* If we don't have POINTER_TYPE, call the function.  */
	if (arg1_align == 0 || arg2_align == 0)
	  {
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	    break;
	  }

	/* Make a place to write the result of the instruction.  */
	result = target;
	if (! (result != 0
	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	  result = gen_reg_rtx (insn_mode);

	emit_insn (gen_cmpstrsi (result,
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg1, NULL_RTX,
						       ptr_mode,
						       EXPAND_NORMAL)),
				 gen_rtx (MEM, BLKmode,
					  expand_expr (arg2, NULL_RTX,
						       ptr_mode,
						       EXPAND_NORMAL)),
				 expand_expr (len, NULL_RTX, VOIDmode, 0),
				 GEN_INT (MIN (arg1_align, arg2_align))));

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	else if (target != 0)
	  {
	    convert_move (target, result, 0);
	    return target;
	  }
	else
	  return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif
    case BUILT_IN_SETJMP:
      if (arglist == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  return expand_builtin_setjmp (buf_addr, target);
	}
      /* __builtin_longjmp is passed a pointer to an array of five words
	 and a value, which is a dummy.  It's similar to the C library longjmp
	 function but works with __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  tree dummy_id = get_identifier ("__dummy");
	  tree dummy_type = build_function_type (void_type_node, NULL_TREE);
	  tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
#ifdef POINTERS_EXTEND_UNSIGNED
	  rtx buf_addr
	    = force_reg (Pmode,
			 convert_memory_address
			 (Pmode,
			  expand_expr (TREE_VALUE (arglist),
				       NULL_RTX, VOIDmode, 0)));
#else
	  rtx buf_addr
	    = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
					     NULL_RTX,
					     VOIDmode, 0));
#endif
	  rtx fp = gen_rtx (MEM, Pmode, buf_addr);
	  rtx lab = gen_rtx (MEM, Pmode,
			     plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
	  enum machine_mode sa_mode
#ifdef HAVE_save_stack_nonlocal
	    = (HAVE_save_stack_nonlocal
	       ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
	       : Pmode);
#else
	    = Pmode;
#endif
	  rtx stack = gen_rtx (MEM, sa_mode,
			       plus_constant (buf_addr,
					      2 * GET_MODE_SIZE (Pmode)));

	  DECL_EXTERNAL (dummy_decl) = 1;
	  TREE_PUBLIC (dummy_decl) = 1;
	  make_decl_rtl (dummy_decl, NULL_PTR, 1);

	  /* Expand the second expression just for side-effects.  */
	  expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
		       const0_rtx, VOIDmode, 0);

	  assemble_external (dummy_decl);

	  /* Pick up FP, label, and SP from the block and jump.  This code is
	     from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
	  if (HAVE_nonlocal_goto)
	    emit_insn (gen_nonlocal_goto (fp, lab, stack,
					  XEXP (DECL_RTL (dummy_decl), 0)));
	  else
#endif
	    {
	      lab = copy_to_reg (lab);
	      emit_move_insn (hard_frame_pointer_rtx, fp);
	      emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	      /* Put in the static chain register the address of the dummy
		 function.  */
	      emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
	      emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	      emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	      emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	      emit_indirect_jump (lab);
	    }

	  return const0_rtx;
	}
      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_FP:
      return frame_pointer_rtx;
    case BUILT_IN_SP:
      return stack_pointer_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_FP_REGNUM:
      return expand_builtin_dwarf_fp_regnum ();
    case BUILT_IN_DWARF_REG_SIZE:
      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_SET_RETURN_ADDR_REG:
      expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
      return const0_rtx;
    case BUILT_IN_EH_STUB:
      return expand_builtin_eh_stub ();
    case BUILT_IN_SET_EH_REGS:
      expand_builtin_set_eh_regs (TREE_VALUE (arglist),
				  TREE_VALUE (TREE_CHAIN (arglist)));
      return const0_rtx;

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO(regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx (SET, VOIDmode, mem, reg)
			    : gen_rtx (SET, VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
	/* For reg-stack.c's stack register household.
	   Compare with a similar piece of code in function.c.  */

	emit_insn (gen_rtx (USE, mode, tem));
#endif

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
		   gen_rtx (MEM, BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx (REG, mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx (MEM, FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	push_to_sequence (call_fusage);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
		     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = change_address (op0, VOIDmode, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
		      flag_check_memory_usage ? NULL_RTX : op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
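
/* Illustrative sketch (not part of the compiler): the POST flag is
   what distinguishes these two source fragments,

       y = x++;		(post == 1: Y gets the saved copy, X the sum)
       y = ++x;		(post == 0: both get the sum)

   and the queueing above is what lets a post-increment of a MEM come
   out as a single autoincrement addressing mode on targets that have
   one.  */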
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
	  /* Do nothing if the call returns a variable-sized object.  */
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
	  /* Do nothing to built-in functions.  */
	  || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		  == FUNCTION_DECL)
	      && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
	 the adjust will be done for it (since it makes jumps).
	 Do it now, so that if this is inside an argument
	 of a function, we don't get the stack adjustment
	 after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TRY_CATCH_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
	  preexpand_calls (TREE_OPERAND (exp, i));
      }
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;
  switch (code)
    {
    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
				   TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
			    NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
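      /* Illustration (an editorial sketch, not from the original file):
	 for `if (x & 0x80)' with X a 32-bit int, the highest set bit of
	 the constant is bit 7, so I == 7, MODE becomes QImode, and the
	 whole test is re-expressed as a QImode comparison -- in effect
	 testing the sign bit of the low byte with a one-byte compare,
	 where the target provides one.  */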
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
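      /* Illustration (editorial sketch): for `if (a && b)' with both
	 labels present, TRUTH_ANDIF_EXPR emits roughly

	     do_jump (a, if_false_label, 0);   -- fall through when A is true
	     do_jump (b, if_false_label, if_true_label);

	 and TRUTH_ORIF_EXPR is the mirror image, falling through to the
	 second operand when the first is false.  */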
    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;
	int alignment;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep,
			     &alignment);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;
    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  comparison = compare (exp, EQ, EQ);
	break;
      }
    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  do_jump
	    (fold
	     (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  comparison = compare (exp, NE, NE);
	break;
      }
    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
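/* Illustration (an editorial sketch, not from the original file): comparing
   two signed DImode values on a 32-bit target gives NWORDS == 2 and roughly
   this jump ladder:

       if (hi0 > hi1) goto if_true_label;    -- signed compare, high word
       if (hi0 != hi1) goto if_false_label;  -- unequal and not greater => less
       if (lo0 > lo1) goto if_true_label;    -- unsigned compare, low word
       goto if_false_label;

   Only the most significant word is compared with the signedness of the
   type; all lower words are compared unsigned.  */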
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
				   NULL_RTX, 0);

      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp == const0_rtx)
	emit_jump (if_true_label);
      else
	do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
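/* Illustration (an editorial sketch, not from the original file): for a
   DImode X on a 32-bit machine, the "or" strategy above computes

       part = x.low | x.high;

   and then decides whether X is zero with a single word-mode comparison
   of PART against zero, instead of one compare-and-branch per word.  */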
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
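/* Illustration (an editorial sketch, not from the original file): if both
   operands are CONST_INTs the comparison folds immediately -- e.g.

       compare_from_rtx (GEN_INT (3), GEN_INT (5), LT, 0, SImode,
			 NULL_RTX, 0)

   returns const_true_rtx and emits no compare insn; otherwise the result
   is a (lt (cc0) (const_int 0))-style rtx for the caller to feed to a
   conditional branch.  */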
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;

      tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
				 NULL_RTX, VOIDmode, 0));
      /* In this case, immed_double_const will sign extend the value to make
	 it look the same on the host and target.  We must remove the
	 sign-extension before calling exact_log2, since exact_log2 will
	 fail for negative values.  */
      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
	  && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
	/* We don't use the obvious constant shift to generate the mask,
	   because that generates compiler warnings when BITS_PER_WORD is
	   greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
	   code is unreachable in that case.  */
	tem = tem & GET_MODE_MASK (word_mode);
      bitnum = exact_log2 (tem);

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
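  /* Illustration (an editorial sketch, not from the original file): for
     `r = ((x & 8) != 0)' the single-bit path above computes roughly

	 r = (x >> 3) & 1;

     and for the EQ form `r = ((x & 8) == 0)' additionally

	 r = ((x >> 3) & 1) ^ 1;

     so no scc instruction is needed.  When the tested bit is the sign bit
     (BITNUM == precision - 1), the trailing AND is omitted and the shift
     is done unsigned.  */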
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));
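  /* Illustration (an editorial sketch, not from the original file): for
     `switch (x)' with cases 3..7, the caller has already computed
     INDEX = x - 3 and RANGE = 4, so the single unsigned test above is

	 if ((unsigned) (x - 3) > 4) goto default_label;

     which rejects both x < 3 (the subtraction wraps to a huge unsigned
     value) and x > 7, replacing two signed comparisons with one.  */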
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
	|| TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory.  */
    if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
	|| TREE_CODE (type) == INTEGER_TYPE)
      opcode = sstoreBI;
    else
      abort ();
  else
    if (TYPE_MODE (type) == BLKmode)
      {
	/* Copy structure.  This expands to a block copy instruction, storeBLK.
	   In addition to the arguments expected by the other store
	   instructions, it also expects a type size (SImode) on top of the
	   stack, which is the structure size in size units (usually bytes).
	   The two first arguments are already on the stack; so we just put
	   the size on level 1.  For some other languages, the size may be
	   variable, this is why we don't encode it as a storeBLK literal,
	   but rather treat it as a full-fledged expression.  */

	bc_expand_expr (TYPE_SIZE (type));
	opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  /* Normalize size and alignment  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return it.  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;

  return retval;
}
/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
			 BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}

/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}
/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			   TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
		 fold (build (PLUS_EXPR,
			      TYPE_POINTER_TO (type),
			      array_adr,
			      fold (build (MULT_EXPR,
					   TYPE_POINTER_TO (type),
					   index,
					   size_in_bytes (type))))));
}
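/* Illustration (an editorial sketch, not from the original file): for
   `int a[10]' on a target with 4-byte ints, `a[i]' becomes the tree

       INDIRECT_REF (ADDR_EXPR (a)  PLUS  i * 4)

   with the offset in bytes (size_in_bytes (type) == 4) and I first
   widened to the pointer's precision so the multiply cannot overflow
   spuriously.  */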
/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else if (TREE_CODE (tem) == ARRAY_REF
	       && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
	bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
		   * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
		   /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */ );
      else
	break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else if (SIval = bitpos / BITS_PER_UNIT)
    bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}
/* Emit code to push two SI constants */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:
      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:
      return (bc_expand_component_address (exp));

    case INDIRECT_REF:
      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
	 also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:
      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:
      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:
      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case ENUMERAL_TYPE:
      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    default:
      abort ();
    }
  return build_int_2 (val, 0);
}
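/* Illustration (an editorial sketch, not from the original file; it assumes
   TYPE_ALIGN is expressed in bits, as elsewhere in the compiler): for a
   32-bit int with TYPE_MODE SImode and TYPE_ALIGN 32, the emitted code is

       (int) SImode | (32 << 8)

   i.e. the machine mode in the low bits and the alignment above them,
   which the bytecode runtime decodes when laying out parameters.  */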
/* Generate constructor label */

static char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, unsignedp,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else
    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer fields than the structure,
	   clear the whole structure first.  */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
		      /* * TYPE_SIZE_UNIT (elttype) */ );

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires.  */
			    VOIDmode, unsignedp,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}
/* Store SI/SU in bitfield */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:
      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}