/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"


#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
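/* Worked example (editor's illustration): on a 32-bit target with
   MOVE_MAX == 4, copying a 12-byte block at 4-byte alignment costs
   three SImode moves by move_by_pieces_ninsns below.  Since 3 < 15,
   emit_block_move copies it inline rather than emitting a movstr
   pattern or a memcpy library call.  */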

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
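/* Illustration (editor's sketch; the real entries live in modemap.def,
   and the opcode names below are hypothetical): an entry such as

     DEF_MODEMAP (SImode, CODE, UCODE, constSI, loadSI, storeSI)

   expands under the macro above to

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;  */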
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
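/* Editor's note: the probe above is purely mechanical.  If, say, no hard
   register passes HARD_REGNO_MODE_OK and recog for XFmode loads on some
   target, direct_load[(int) XFmode] stays 0 and, per the comment above
   the direct_load declaration, the expansion code will avoid referring
   to XFmode fields in memory directly.  (Illustrative; the affected
   modes are entirely target-dependent.)  */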

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
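/* Example (editor's sketch): to queue the side effect of V++ for a
   pseudo register V, a caller could write

     rtx q = enqueue_insn (v, gen_move_insn (v, plus_constant (v, 1)));

   Q is then a QUEUED rtx standing for the pre-increment value of V;
   the move itself is emitted later by emit_queue.  */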

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
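/* Usage sketch (editor's note): call protect_from_queue immediately
   before building the insn that uses X, e.g.

     x = protect_from_queue (x, 0);
     emit_insn (gen_move_insn (target, x));

   and do not cache the protected value across an emit_queue, as the
   comment above warns.  */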

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
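/* Example (editor's note): to sign-extend an HImode pseudo FROM into
   an SImode pseudo TO, a caller writes

     convert_move (to, from, 0);

   which uses an extendhisi2 pattern when the target provides one and
   otherwise goes through the extension search above.  */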

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
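/* Example (editor's note):

     rtx wide = convert_modes (SImode, QImode, x, 1);

   yields X zero-extended to SImode, returning an adjusted constant
   directly when X is a CONST_INT and otherwise converting through a
   fresh pseudo register.  */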
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
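/* Worked example (editor's note): with MOVE_MAX == 4 and len == 7 at
   4-byte alignment, the loop above emits one SImode move, then one
   HImode move, then one QImode move (4 + 2 + 1 bytes), stepping
   data.offset as it goes.  */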

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
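/* Editor's note: for the example above (l == 7, align == 4,
   MOVE_MAX == 4) this computes 7/4 + 3/2 + 1/1 = 3 insns, the count
   that emit_block_move compares against MOVE_RATIO.  */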

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
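/* Example (editor's sketch): to copy a 16-byte, word-aligned BLKmode
   aggregate from SRC to DEST (both MEM rtx's):

     emit_block_move (dest, src, GEN_INT (16), UNITS_PER_WORD);

   With MOVE_RATIO == 15 this is done inline by move_by_pieces.  */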
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
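/* Example (editor's note): a call that passes an argument in three
   consecutive hard registers starting at hard register 4 records that
   fact for the benefit of flow analysis with

     use_regs (&call_fusage, 4, 3);  */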
1854 \f
1855 /* Generate several move instructions to clear LEN bytes of block TO.
1856 (A MEM rtx with BLKmode). The caller must pass TO through
1857 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1858 we can assume. */
1859
1860 static void
1861 clear_by_pieces (to, len, align)
1862 rtx to;
1863 int len, align;
1864 {
1865 struct clear_by_pieces data;
1866 rtx to_addr = XEXP (to, 0);
1867 int max_size = MOVE_MAX + 1;
1868
1869 data.offset = 0;
1870 data.to_addr = to_addr;
1871 data.to = to;
1872 data.autinc_to
1873 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1874 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1875
1876 data.explicit_inc_to = 0;
1877 data.reverse
1878 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1879 if (data.reverse) data.offset = len;
1880 data.len = len;
1881
1882 data.to_struct = MEM_IN_STRUCT_P (to);
1883
1884 /* If copying requires more than two move insns,
1885 copy addresses to registers (to make displacements shorter)
1886 and use post-increment if available. */
1887 if (!data.autinc_to
1888 && move_by_pieces_ninsns (len, align) > 2)
1889 {
1890 #ifdef HAVE_PRE_DECREMENT
1891 if (data.reverse && ! data.autinc_to)
1892 {
1893 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1894 data.autinc_to = 1;
1895 data.explicit_inc_to = -1;
1896 }
1897 #endif
1898 #ifdef HAVE_POST_INCREMENT
1899 if (! data.reverse && ! data.autinc_to)
1900 {
1901 data.to_addr = copy_addr_to_reg (to_addr);
1902 data.autinc_to = 1;
1903 data.explicit_inc_to = 1;
1904 }
1905 #endif
1906 if (!data.autinc_to && CONSTANT_P (to_addr))
1907 data.to_addr = copy_addr_to_reg (to_addr);
1908 }
1909
1910 if (! SLOW_UNALIGNED_ACCESS
1911 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1912 align = MOVE_MAX;
1913
1914 /* First move what we can in the largest integer mode, then go to
1915 successively smaller modes. */
1916
1917 while (max_size > 1)
1918 {
1919 enum machine_mode mode = VOIDmode, tmode;
1920 enum insn_code icode;
1921
1922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1924 if (GET_MODE_SIZE (tmode) < max_size)
1925 mode = tmode;
1926
1927 if (mode == VOIDmode)
1928 break;
1929
1930 icode = mov_optab->handlers[(int) mode].insn_code;
1931 if (icode != CODE_FOR_nothing
1932 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1933 GET_MODE_SIZE (mode)))
1934 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1935
1936 max_size = GET_MODE_SIZE (mode);
1937 }
1938
1939 /* The code above should have handled everything. */
1940 if (data.len != 0)
1941 abort ();
1942 }
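/* For instance, on a 32-bit target where MOVE_MAX is 4, clearing a
   7-byte block proceeds SImode first (7 bytes -> 3 left), then HImode
   (3 -> 1), then QImode (1 -> 0): each pass uses the widest integer
   mode narrower than MAX_SIZE and then shrinks MAX_SIZE to that
   mode's width.  */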
1943
1944 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1945 with move instructions for mode MODE. GENFUN is the gen_... function
1946 to make a move insn for that mode. DATA has all the other info. */
1947
1948 static void
1949 clear_by_pieces_1 (genfun, mode, data)
1950 rtx (*genfun) ();
1951 enum machine_mode mode;
1952 struct clear_by_pieces *data;
1953 {
1954 register int size = GET_MODE_SIZE (mode);
1955 register rtx to1;
1956
1957 while (data->len >= size)
1958 {
1959 if (data->reverse) data->offset -= size;
1960
1961 to1 = (data->autinc_to
1962 ? gen_rtx (MEM, mode, data->to_addr)
1963 : change_address (data->to, mode,
1964 plus_constant (data->to_addr, data->offset)));
1965 MEM_IN_STRUCT_P (to1) = data->to_struct;
1966
1967 #ifdef HAVE_PRE_DECREMENT
1968 if (data->explicit_inc_to < 0)
1969 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1970 #endif
1971
1972 emit_insn ((*genfun) (to1, const0_rtx));
1973 #ifdef HAVE_POST_INCREMENT
1974 if (data->explicit_inc_to > 0)
1975 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1976 #endif
1977
1978 if (! data->reverse) data->offset += size;
1979
1980 data->len -= size;
1981 }
1982 }
1983 \f
1984 /* Write zeros through the storage of OBJECT.
1985 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
1986    the maximum alignment we can assume it has, measured in bytes.  */
1987
1988 void
1989 clear_storage (object, size, align)
1990 rtx object;
1991 rtx size;
1992 int align;
1993 {
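  /* Strategy: for a BLKmode object we try, in order, (1) clearing
     inline with clear_by_pieces when SIZE is a suitably small
     constant, (2) a machine-specific clrstr pattern, and (3) a
     library call to memset or bzero.  Anything else is cleared with
     a single move of const0_rtx.  */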
1994 if (GET_MODE (object) == BLKmode)
1995 {
1996 object = protect_from_queue (object, 1);
1997 size = protect_from_queue (size, 0);
1998
1999 if (GET_CODE (size) == CONST_INT
2000 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2001 clear_by_pieces (object, INTVAL (size), align);
2002
2003 else
2004 {
2005 /* Try the most limited insn first, because there's no point
2006 including more than one in the machine description unless
2007 the more limited one has some advantage. */
2008
2009 rtx opalign = GEN_INT (align);
2010 enum machine_mode mode;
2011
2012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2013 mode = GET_MODE_WIDER_MODE (mode))
2014 {
2015 enum insn_code code = clrstr_optab[(int) mode];
2016
2017 if (code != CODE_FOR_nothing
2018 /* We don't need MODE to be narrower than
2019 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2020 the mode mask, as it is returned by the macro, it will
2021 definitely be less than the actual mode mask. */
2022 && ((GET_CODE (size) == CONST_INT
2023 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2024 <= GET_MODE_MASK (mode)))
2025 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2026 && (insn_operand_predicate[(int) code][0] == 0
2027 || (*insn_operand_predicate[(int) code][0]) (object,
2028 BLKmode))
2029 && (insn_operand_predicate[(int) code][2] == 0
2030 || (*insn_operand_predicate[(int) code][2]) (opalign,
2031 VOIDmode)))
2032 {
2033 rtx op1;
2034 rtx last = get_last_insn ();
2035 rtx pat;
2036
2037 op1 = convert_to_mode (mode, size, 1);
2038 if (insn_operand_predicate[(int) code][1] != 0
2039 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2040 mode))
2041 op1 = copy_to_mode_reg (mode, op1);
2042
2043 pat = GEN_FCN ((int) code) (object, op1, opalign);
2044 if (pat)
2045 {
2046 emit_insn (pat);
2047 return;
2048 }
2049 else
2050 delete_insns_since (last);
2051 }
2052 }
2053
2054
2055 #ifdef TARGET_MEM_FUNCTIONS
2056 emit_library_call (memset_libfunc, 0,
2057 VOIDmode, 3,
2058 XEXP (object, 0), Pmode,
2059 const0_rtx, TYPE_MODE (integer_type_node),
2060 convert_to_mode (TYPE_MODE (sizetype),
2061 size, TREE_UNSIGNED (sizetype)),
2062 TYPE_MODE (sizetype));
2063 #else
2064 emit_library_call (bzero_libfunc, 0,
2065 VOIDmode, 2,
2066 XEXP (object, 0), Pmode,
2067 convert_to_mode (TYPE_MODE (integer_type_node),
2068 size,
2069 TREE_UNSIGNED (integer_type_node)),
2070 TYPE_MODE (integer_type_node));
2071 #endif
2072 }
2073 }
2074 else
2075 emit_move_insn (object, const0_rtx);
2076 }
2077
2078 /* Generate code to copy Y into X.
2079 Both Y and X must have the same mode, except that
2080 Y can be a constant with VOIDmode.
2081 This mode cannot be BLKmode; use emit_block_move for that.
2082
2083 Return the last instruction emitted. */
2084
2085 rtx
2086 emit_move_insn (x, y)
2087 rtx x, y;
2088 {
2089 enum machine_mode mode = GET_MODE (x);
2090
2091 x = protect_from_queue (x, 1);
2092 y = protect_from_queue (y, 0);
2093
2094 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2095 abort ();
2096
2097 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2098 y = force_const_mem (mode, y);
2099
2100 /* If X or Y are memory references, verify that their addresses are valid
2101 for the machine. */
2102 if (GET_CODE (x) == MEM
2103 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2104 && ! push_operand (x, GET_MODE (x)))
2105 || (flag_force_addr
2106 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2107 x = change_address (x, VOIDmode, XEXP (x, 0));
2108
2109 if (GET_CODE (y) == MEM
2110 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2111 || (flag_force_addr
2112 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2113 y = change_address (y, VOIDmode, XEXP (y, 0));
2114
2115 if (mode == BLKmode)
2116 abort ();
2117
2118 return emit_move_insn_1 (x, y);
2119 }
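/* Example (a minimal sketch): copying between SImode pseudos, or
   storing a constant into one, might look like

	rtx dst = gen_reg_rtx (SImode), src = gen_reg_rtx (SImode);
	emit_move_insn (dst, src);
	emit_move_insn (dst, GEN_INT (42));

   The CONST_INT source is legal because constants have VOIDmode.  */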
2120
2121 /* Low level part of emit_move_insn.
2122 Called just like emit_move_insn, but assumes X and Y
2123 are basically valid. */
2124
2125 rtx
2126 emit_move_insn_1 (x, y)
2127 rtx x, y;
2128 {
2129 enum machine_mode mode = GET_MODE (x);
2130 enum machine_mode submode;
2131 enum mode_class class = GET_MODE_CLASS (mode);
2132 int i;
2133
2134 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2135 return
2136 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2137
2138 /* Expand complex moves by moving real part and imag part, if possible. */
2139 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2140 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2141 * BITS_PER_UNIT),
2142 (class == MODE_COMPLEX_INT
2143 ? MODE_INT : MODE_FLOAT),
2144 0))
2145 && (mov_optab->handlers[(int) submode].insn_code
2146 != CODE_FOR_nothing))
2147 {
2148 /* Don't split destination if it is a stack push. */
2149 int stack = push_operand (x, GET_MODE (x));
2150 rtx insns;
2151
2152 	 /* If this is a stack push, push the highpart first, so it
2153 will be in the argument order.
2154
2155 In that case, change_address is used only to convert
2156 the mode, not to change the address. */
2157 if (stack)
2158 {
2159 /* Note that the real part always precedes the imag part in memory
2160 regardless of machine's endianness. */
2161 #ifdef STACK_GROWS_DOWNWARD
2162 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2163 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2164 gen_imagpart (submode, y)));
2165 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2166 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2167 gen_realpart (submode, y)));
2168 #else
2169 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2170 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2171 gen_realpart (submode, y)));
2172 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2173 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2174 gen_imagpart (submode, y)));
2175 #endif
2176 }
2177 else
2178 {
2179 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2180 (gen_realpart (submode, x), gen_realpart (submode, y)));
2181 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2182 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2183 }
2184
2185 return get_last_insn ();
2186 }
2187
2188 /* This will handle any multi-word mode that lacks a move_insn pattern.
2189 However, you will get better code if you define such patterns,
2190 even if they must turn into multiple assembler instructions. */
2191 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2192 {
2193 rtx last_insn = 0;
2194 rtx insns;
2195
2196 #ifdef PUSH_ROUNDING
2197
2198 /* If X is a push on the stack, do the push now and replace
2199 X with a reference to the stack pointer. */
2200 if (push_operand (x, GET_MODE (x)))
2201 {
2202 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2203 x = change_address (x, VOIDmode, stack_pointer_rtx);
2204 }
2205 #endif
2206
2207 /* Show the output dies here. */
2208 if (x != y)
2209 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2210
2211 for (i = 0;
2212 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2213 i++)
2214 {
2215 rtx xpart = operand_subword (x, i, 1, mode);
2216 rtx ypart = operand_subword (y, i, 1, mode);
2217
2218 /* If we can't get a part of Y, put Y into memory if it is a
2219 constant. Otherwise, force it into a register. If we still
2220 can't get a part of Y, abort. */
2221 if (ypart == 0 && CONSTANT_P (y))
2222 {
2223 y = force_const_mem (mode, y);
2224 ypart = operand_subword (y, i, 1, mode);
2225 }
2226 else if (ypart == 0)
2227 ypart = operand_subword_force (y, i, mode);
2228
2229 if (xpart == 0 || ypart == 0)
2230 abort ();
2231
2232 last_insn = emit_move_insn (xpart, ypart);
2233 }
2234
2235 return last_insn;
2236 }
2237 else
2238 abort ();
2239 }
2240 \f
2241 /* Pushing data onto the stack. */
2242
2243 /* Push a block of length SIZE (perhaps variable)
2244 and return an rtx to address the beginning of the block.
2245 Note that it is not possible for the value returned to be a QUEUED.
2246 The value may be virtual_outgoing_args_rtx.
2247
2248 EXTRA is the number of bytes of padding to push in addition to SIZE.
2249 BELOW nonzero means this padding comes at low addresses;
2250 otherwise, the padding comes at high addresses. */
2251
2252 rtx
2253 push_block (size, extra, below)
2254 rtx size;
2255 int extra, below;
2256 {
2257 register rtx temp;
2258
2259 size = convert_modes (Pmode, ptr_mode, size, 1);
2260 if (CONSTANT_P (size))
2261 anti_adjust_stack (plus_constant (size, extra));
2262 else if (GET_CODE (size) == REG && extra == 0)
2263 anti_adjust_stack (size);
2264 else
2265 {
2266 rtx temp = copy_to_mode_reg (Pmode, size);
2267 if (extra != 0)
2268 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2269 temp, 0, OPTAB_LIB_WIDEN);
2270 anti_adjust_stack (temp);
2271 }
2272
2273 #ifdef STACK_GROWS_DOWNWARD
2274 temp = virtual_outgoing_args_rtx;
2275 if (extra != 0 && below)
2276 temp = plus_constant (temp, extra);
2277 #else
2278 if (GET_CODE (size) == CONST_INT)
2279 temp = plus_constant (virtual_outgoing_args_rtx,
2280 - INTVAL (size) - (below ? 0 : extra));
2281 else if (extra != 0 && !below)
2282 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2283 negate_rtx (Pmode, plus_constant (size, extra)));
2284 else
2285 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2286 negate_rtx (Pmode, size));
2287 #endif
2288
2289 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2290 }
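/* For example, on a machine where the stack grows downward, pushing a
   16-byte block with EXTRA == 0 amounts to
   anti_adjust_stack (GEN_INT (16)) followed by returning
   virtual_outgoing_args_rtx as the address of the block.  */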
2291
2292 rtx
2293 gen_push_operand ()
2294 {
2295 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2296 }
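/* With STACK_PUSH_CODE == PRE_DEC, for example, this yields
   (pre_dec (reg sp)); emit_push_insn wraps it as
   gen_rtx (MEM, BLKmode, gen_push_operand ()) to obtain an address
   that pushes onto the stack as it is stored through.  */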
2297
2298 /* Generate code to push X onto the stack, assuming it has mode MODE and
2299 type TYPE.
2300 MODE is redundant except when X is a CONST_INT (since they don't
2301 carry mode info).
2302 SIZE is an rtx for the size of data to be copied (in bytes),
2303 needed only if X is BLKmode.
2304
2305 ALIGN (in bytes) is maximum alignment we can assume.
2306
2307 If PARTIAL and REG are both nonzero, then copy that many of the first
2308 words of X into registers starting with REG, and push the rest of X.
2309 The amount of space pushed is decreased by PARTIAL words,
2310 rounded *down* to a multiple of PARM_BOUNDARY.
2311 REG must be a hard register in this case.
2312    If REG is zero but PARTIAL is not, take all other actions for an
2313 argument partially in registers, but do not actually load any
2314 registers.
2315
2316 EXTRA is the amount in bytes of extra space to leave next to this arg.
2317 This is ignored if an argument block has already been allocated.
2318
2319 On a machine that lacks real push insns, ARGS_ADDR is the address of
2320 the bottom of the argument block for this call. We use indexing off there
2321    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2322 argument block has not been preallocated.
2323
2324 ARGS_SO_FAR is the size of args previously pushed for this call. */
2325
2326 void
2327 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2328 args_addr, args_so_far)
2329 register rtx x;
2330 enum machine_mode mode;
2331 tree type;
2332 rtx size;
2333 int align;
2334 int partial;
2335 rtx reg;
2336 int extra;
2337 rtx args_addr;
2338 rtx args_so_far;
2339 {
2340 rtx xinner;
2341 enum direction stack_direction
2342 #ifdef STACK_GROWS_DOWNWARD
2343 = downward;
2344 #else
2345 = upward;
2346 #endif
2347
2348 /* Decide where to pad the argument: `downward' for below,
2349 `upward' for above, or `none' for don't pad it.
2350 Default is below for small data on big-endian machines; else above. */
2351 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2352
2353 /* Invert direction if stack is post-update. */
2354 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2355 if (where_pad != none)
2356 where_pad = (where_pad == downward ? upward : downward);
2357
2358 xinner = x = protect_from_queue (x, 0);
2359
2360 if (mode == BLKmode)
2361 {
2362 /* Copy a block into the stack, entirely or partially. */
2363
2364 register rtx temp;
2365 int used = partial * UNITS_PER_WORD;
2366 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2367 int skip;
2368
2369 if (size == 0)
2370 abort ();
2371
2372 used -= offset;
2373
2374 /* USED is now the # of bytes we need not copy to the stack
2375 because registers will take care of them. */
2376
2377 if (partial != 0)
2378 xinner = change_address (xinner, BLKmode,
2379 plus_constant (XEXP (xinner, 0), used));
2380
2381 /* If the partial register-part of the arg counts in its stack size,
2382 skip the part of stack space corresponding to the registers.
2383 Otherwise, start copying to the beginning of the stack space,
2384 by setting SKIP to 0. */
2385 #ifndef REG_PARM_STACK_SPACE
2386 skip = 0;
2387 #else
2388 skip = used;
2389 #endif
2390
2391 #ifdef PUSH_ROUNDING
2392 /* Do it with several push insns if that doesn't take lots of insns
2393 and if there is no difficulty with push insns that skip bytes
2394 on the stack for alignment purposes. */
2395 if (args_addr == 0
2396 && GET_CODE (size) == CONST_INT
2397 && skip == 0
2398 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2399 < MOVE_RATIO)
2400 /* Here we avoid the case of a structure whose weak alignment
2401 forces many pushes of a small amount of data,
2402 and such small pushes do rounding that causes trouble. */
2403 && ((! SLOW_UNALIGNED_ACCESS)
2404 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2405 || PUSH_ROUNDING (align) == align)
2406 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2407 {
2408 /* Push padding now if padding above and stack grows down,
2409 or if padding below and stack grows up.
2410 But if space already allocated, this has already been done. */
2411 if (extra && args_addr == 0
2412 && where_pad != none && where_pad != stack_direction)
2413 anti_adjust_stack (GEN_INT (extra));
2414
2415 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2416 INTVAL (size) - used, align);
2417 }
2418 else
2419 #endif /* PUSH_ROUNDING */
2420 {
2421 /* Otherwise make space on the stack and copy the data
2422 to the address of that space. */
2423
2424 /* Deduct words put into registers from the size we must copy. */
2425 if (partial != 0)
2426 {
2427 if (GET_CODE (size) == CONST_INT)
2428 size = GEN_INT (INTVAL (size) - used);
2429 else
2430 size = expand_binop (GET_MODE (size), sub_optab, size,
2431 GEN_INT (used), NULL_RTX, 0,
2432 OPTAB_LIB_WIDEN);
2433 }
2434
2435 /* Get the address of the stack space.
2436 In this case, we do not deal with EXTRA separately.
2437 A single stack adjust will do. */
2438 if (! args_addr)
2439 {
2440 temp = push_block (size, extra, where_pad == downward);
2441 extra = 0;
2442 }
2443 else if (GET_CODE (args_so_far) == CONST_INT)
2444 temp = memory_address (BLKmode,
2445 plus_constant (args_addr,
2446 skip + INTVAL (args_so_far)));
2447 else
2448 temp = memory_address (BLKmode,
2449 plus_constant (gen_rtx (PLUS, Pmode,
2450 args_addr, args_so_far),
2451 skip));
2452
2453 /* TEMP is the address of the block. Copy the data there. */
2454 if (GET_CODE (size) == CONST_INT
2455 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2456 < MOVE_RATIO))
2457 {
2458 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2459 INTVAL (size), align);
2460 goto ret;
2461 }
2462 /* Try the most limited insn first, because there's no point
2463 including more than one in the machine description unless
2464 the more limited one has some advantage. */
2465 #ifdef HAVE_movstrqi
2466 if (HAVE_movstrqi
2467 && GET_CODE (size) == CONST_INT
2468 && ((unsigned) INTVAL (size)
2469 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2470 {
2471 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2472 xinner, size, GEN_INT (align));
2473 if (pat != 0)
2474 {
2475 emit_insn (pat);
2476 goto ret;
2477 }
2478 }
2479 #endif
2480 #ifdef HAVE_movstrhi
2481 if (HAVE_movstrhi
2482 && GET_CODE (size) == CONST_INT
2483 && ((unsigned) INTVAL (size)
2484 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2485 {
2486 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2487 xinner, size, GEN_INT (align));
2488 if (pat != 0)
2489 {
2490 emit_insn (pat);
2491 goto ret;
2492 }
2493 }
2494 #endif
2495 #ifdef HAVE_movstrsi
2496 if (HAVE_movstrsi)
2497 {
2498 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2499 xinner, size, GEN_INT (align));
2500 if (pat != 0)
2501 {
2502 emit_insn (pat);
2503 goto ret;
2504 }
2505 }
2506 #endif
2507 #ifdef HAVE_movstrdi
2508 if (HAVE_movstrdi)
2509 {
2510 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2511 xinner, size, GEN_INT (align));
2512 if (pat != 0)
2513 {
2514 emit_insn (pat);
2515 goto ret;
2516 }
2517 }
2518 #endif
2519
2520 #ifndef ACCUMULATE_OUTGOING_ARGS
2521 /* If the source is referenced relative to the stack pointer,
2522 copy it to another register to stabilize it. We do not need
2523 to do this if we know that we won't be changing sp. */
2524
2525 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2526 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2527 temp = copy_to_reg (temp);
2528 #endif
2529
2530 /* Make inhibit_defer_pop nonzero around the library call
2531 to force it to pop the bcopy-arguments right away. */
2532 NO_DEFER_POP;
2533 #ifdef TARGET_MEM_FUNCTIONS
2534 emit_library_call (memcpy_libfunc, 0,
2535 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2536 convert_to_mode (TYPE_MODE (sizetype),
2537 size, TREE_UNSIGNED (sizetype)),
2538 TYPE_MODE (sizetype));
2539 #else
2540 emit_library_call (bcopy_libfunc, 0,
2541 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2542 convert_to_mode (TYPE_MODE (integer_type_node),
2543 size,
2544 TREE_UNSIGNED (integer_type_node)),
2545 TYPE_MODE (integer_type_node));
2546 #endif
2547 OK_DEFER_POP;
2548 }
2549 }
2550 else if (partial > 0)
2551 {
2552 /* Scalar partly in registers. */
2553
2554 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2555 int i;
2556 int not_stack;
2557 /* # words of start of argument
2558 that we must make space for but need not store. */
2559 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2560 int args_offset = INTVAL (args_so_far);
2561 int skip;
2562
2563 /* Push padding now if padding above and stack grows down,
2564 or if padding below and stack grows up.
2565 But if space already allocated, this has already been done. */
2566 if (extra && args_addr == 0
2567 && where_pad != none && where_pad != stack_direction)
2568 anti_adjust_stack (GEN_INT (extra));
2569
2570 /* If we make space by pushing it, we might as well push
2571 the real data. Otherwise, we can leave OFFSET nonzero
2572 and leave the space uninitialized. */
2573 if (args_addr == 0)
2574 offset = 0;
2575
2576 /* Now NOT_STACK gets the number of words that we don't need to
2577 allocate on the stack. */
2578 not_stack = partial - offset;
2579
2580 /* If the partial register-part of the arg counts in its stack size,
2581 skip the part of stack space corresponding to the registers.
2582 Otherwise, start copying to the beginning of the stack space,
2583 by setting SKIP to 0. */
2584 #ifndef REG_PARM_STACK_SPACE
2585 skip = 0;
2586 #else
2587 skip = not_stack;
2588 #endif
2589
2590 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2591 x = validize_mem (force_const_mem (mode, x));
2592
2593 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2594 SUBREGs of such registers are not allowed. */
2595 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2596 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2597 x = copy_to_reg (x);
2598
2599 /* Loop over all the words allocated on the stack for this arg. */
2600 /* We can do it by words, because any scalar bigger than a word
2601 has a size a multiple of a word. */
2602 #ifndef PUSH_ARGS_REVERSED
2603 for (i = not_stack; i < size; i++)
2604 #else
2605 for (i = size - 1; i >= not_stack; i--)
2606 #endif
2607 if (i >= not_stack + offset)
2608 emit_push_insn (operand_subword_force (x, i, mode),
2609 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2610 0, args_addr,
2611 GEN_INT (args_offset + ((i - not_stack + skip)
2612 * UNITS_PER_WORD)));
2613 }
2614 else
2615 {
2616 rtx addr;
2617
2618 /* Push padding now if padding above and stack grows down,
2619 or if padding below and stack grows up.
2620 But if space already allocated, this has already been done. */
2621 if (extra && args_addr == 0
2622 && where_pad != none && where_pad != stack_direction)
2623 anti_adjust_stack (GEN_INT (extra));
2624
2625 #ifdef PUSH_ROUNDING
2626 if (args_addr == 0)
2627 addr = gen_push_operand ();
2628 else
2629 #endif
2630 if (GET_CODE (args_so_far) == CONST_INT)
2631 addr
2632 = memory_address (mode,
2633 plus_constant (args_addr, INTVAL (args_so_far)));
2634 else
2635 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2636 args_so_far));
2637
2638 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2639 }
2640
2641 ret:
2642 /* If part should go in registers, copy that part
2643 into the appropriate registers. Do this now, at the end,
2644 since mem-to-mem copies above may do function calls. */
2645 if (partial > 0 && reg != 0)
2646 move_block_to_reg (REGNO (reg), x, partial, mode);
2647
2648 if (extra && args_addr == 0 && where_pad == stack_direction)
2649 anti_adjust_stack (GEN_INT (extra));
2650 }
2651 \f
2652 /* Expand an assignment that stores the value of FROM into TO.
2653 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2654 (This may contain a QUEUED rtx;
2655 if the value is constant, this rtx is a constant.)
2656 Otherwise, the returned value is NULL_RTX.
2657
2658 SUGGEST_REG is no longer actually used.
2659 It used to mean, copy the value through a register
2660 and return that register, if that is possible.
2661 We now use WANT_VALUE to decide whether to do this. */
2662
2663 rtx
2664 expand_assignment (to, from, want_value, suggest_reg)
2665 tree to, from;
2666 int want_value;
2667 int suggest_reg;
2668 {
2669 register rtx to_rtx = 0;
2670 rtx result;
2671
2672 /* Don't crash if the lhs of the assignment was erroneous. */
2673
2674 if (TREE_CODE (to) == ERROR_MARK)
2675 {
2676 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2677 return want_value ? result : NULL_RTX;
2678 }
2679
2680 if (output_bytecode)
2681 {
2682 tree dest_innermost;
2683
2684 bc_expand_expr (from);
2685 bc_emit_instruction (duplicate);
2686
2687 dest_innermost = bc_expand_address (to);
2688
2689 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2690 take care of it here. */
2691
2692 bc_store_memory (TREE_TYPE (to), dest_innermost);
2693 return NULL;
2694 }
2695
2696 /* Assignment of a structure component needs special treatment
2697 if the structure component's rtx is not simply a MEM.
2698 Assignment of an array element at a constant index, and assignment of
2699    an array element in an unaligned packed structure field, have the same
2700 problem. */
2701
2702 if (TREE_CODE (to) == COMPONENT_REF
2703 || TREE_CODE (to) == BIT_FIELD_REF
2704 || (TREE_CODE (to) == ARRAY_REF
2705 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2706 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2707 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2708 {
2709 enum machine_mode mode1;
2710 int bitsize;
2711 int bitpos;
2712 tree offset;
2713 int unsignedp;
2714 int volatilep = 0;
2715 tree tem;
2716 int alignment;
2717
2718 push_temp_slots ();
2719 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2720 &mode1, &unsignedp, &volatilep);
2721
2722 /* If we are going to use store_bit_field and extract_bit_field,
2723 make sure to_rtx will be safe for multiple use. */
2724
2725 if (mode1 == VOIDmode && want_value)
2726 tem = stabilize_reference (tem);
2727
2728 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2729 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2730 if (offset != 0)
2731 {
2732 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2733
2734 if (GET_CODE (to_rtx) != MEM)
2735 abort ();
2736 to_rtx = change_address (to_rtx, VOIDmode,
2737 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2738 force_reg (ptr_mode, offset_rtx)));
2739 /* If we have a variable offset, the known alignment
2740 is only that of the innermost structure containing the field.
2741 (Actually, we could sometimes do better by using the
2742 align of an element of the innermost array, but no need.) */
2743 if (TREE_CODE (to) == COMPONENT_REF
2744 || TREE_CODE (to) == BIT_FIELD_REF)
2745 alignment
2746 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2747 }
2748 if (volatilep)
2749 {
2750 if (GET_CODE (to_rtx) == MEM)
2751 {
2752 /* When the offset is zero, to_rtx is the address of the
2753 structure we are storing into, and hence may be shared.
2754 We must make a new MEM before setting the volatile bit. */
2755 if (offset == 0)
2756 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2757 MEM_VOLATILE_P (to_rtx) = 1;
2758 }
2759 #if 0 /* This was turned off because, when a field is volatile
2760 in an object which is not volatile, the object may be in a register,
2761 and then we would abort over here. */
2762 else
2763 abort ();
2764 #endif
2765 }
2766
2767 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2768 (want_value
2769 /* Spurious cast makes HPUX compiler happy. */
2770 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2771 : VOIDmode),
2772 unsignedp,
2773 /* Required alignment of containing datum. */
2774 alignment,
2775 int_size_in_bytes (TREE_TYPE (tem)));
2776 preserve_temp_slots (result);
2777 free_temp_slots ();
2778 pop_temp_slots ();
2779
2780 /* If the value is meaningful, convert RESULT to the proper mode.
2781 Otherwise, return nothing. */
2782 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2783 TYPE_MODE (TREE_TYPE (from)),
2784 result,
2785 TREE_UNSIGNED (TREE_TYPE (to)))
2786 : NULL_RTX);
2787 }
2788
2789 /* If the rhs is a function call and its value is not an aggregate,
2790 call the function before we start to compute the lhs.
2791 This is needed for correct code for cases such as
2792 val = setjmp (buf) on machines where reference to val
2793 requires loading up part of an address in a separate insn.
2794
2795 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2796 a promoted variable where the zero- or sign- extension needs to be done.
2797 Handling this in the normal way is safe because no computation is done
2798 before the call. */
2799 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2800 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2801 {
2802 rtx value;
2803
2804 push_temp_slots ();
2805 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2806 if (to_rtx == 0)
2807 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2808
2809 if (GET_MODE (to_rtx) == BLKmode)
2810 emit_block_move (to_rtx, value, expr_size (from),
2811 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2812 else
2813 emit_move_insn (to_rtx, value);
2814 preserve_temp_slots (to_rtx);
2815 free_temp_slots ();
2816 pop_temp_slots ();
2817 return want_value ? to_rtx : NULL_RTX;
2818 }
2819
2820 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2821 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2822
2823 if (to_rtx == 0)
2824 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2825
2826 /* Don't move directly into a return register. */
2827 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2828 {
2829 rtx temp;
2830
2831 push_temp_slots ();
2832 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2833 emit_move_insn (to_rtx, temp);
2834 preserve_temp_slots (to_rtx);
2835 free_temp_slots ();
2836 pop_temp_slots ();
2837 return want_value ? to_rtx : NULL_RTX;
2838 }
2839
2840 /* In case we are returning the contents of an object which overlaps
2841 the place the value is being stored, use a safe function when copying
2842 a value through a pointer into a structure value return block. */
2843 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2844 && current_function_returns_struct
2845 && !current_function_returns_pcc_struct)
2846 {
2847 rtx from_rtx, size;
2848
2849 push_temp_slots ();
2850 size = expr_size (from);
2851 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2852
2853 #ifdef TARGET_MEM_FUNCTIONS
2854 emit_library_call (memcpy_libfunc, 0,
2855 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2856 XEXP (from_rtx, 0), Pmode,
2857 convert_to_mode (TYPE_MODE (sizetype),
2858 size, TREE_UNSIGNED (sizetype)),
2859 TYPE_MODE (sizetype));
2860 #else
2861 emit_library_call (bcopy_libfunc, 0,
2862 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2863 XEXP (to_rtx, 0), Pmode,
2864 convert_to_mode (TYPE_MODE (integer_type_node),
2865 size, TREE_UNSIGNED (integer_type_node)),
2866 TYPE_MODE (integer_type_node));
2867 #endif
2868
2869 preserve_temp_slots (to_rtx);
2870 free_temp_slots ();
2871 pop_temp_slots ();
2872 return want_value ? to_rtx : NULL_RTX;
2873 }
2874
2875 /* Compute FROM and store the value in the rtx we got. */
2876
2877 push_temp_slots ();
2878 result = store_expr (from, to_rtx, want_value);
2879 preserve_temp_slots (result);
2880 free_temp_slots ();
2881 pop_temp_slots ();
2882 return want_value ? result : NULL_RTX;
2883 }
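/* For a simple assignment such as `i = j + 1', none of the special
   cases above fire: TO expands to i's DECL_RTL and store_expr does
   the rest.  The earlier cases cover bit-field and component stores,
   calls on the rhs, return registers, and overlapping struct-value
   returns.  */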
2884
2885 /* Generate code for computing expression EXP,
2886 and storing the value into TARGET.
2887 TARGET may contain a QUEUED rtx.
2888
2889 If WANT_VALUE is nonzero, return a copy of the value
2890 not in TARGET, so that we can be sure to use the proper
2891 value in a containing expression even if TARGET has something
2892 else stored in it. If possible, we copy the value through a pseudo
2893 and return that pseudo. Or, if the value is constant, we try to
2894 return the constant. In some cases, we return a pseudo
2895 copied *from* TARGET.
2896
2897 If the mode is BLKmode then we may return TARGET itself.
2898    It turns out that in BLKmode it doesn't cause a problem,
2899 because C has no operators that could combine two different
2900 assignments into the same BLKmode object with different values
2901 with no sequence point. Will other languages need this to
2902 be more thorough?
2903
2904 If WANT_VALUE is 0, we return NULL, to make sure
2905 to catch quickly any cases where the caller uses the value
2906 and fails to set WANT_VALUE. */
2907
2908 rtx
2909 store_expr (exp, target, want_value)
2910 register tree exp;
2911 register rtx target;
2912 int want_value;
2913 {
2914 register rtx temp;
2915 int dont_return_target = 0;
2916
2917 if (TREE_CODE (exp) == COMPOUND_EXPR)
2918 {
2919 /* Perform first part of compound expression, then assign from second
2920 part. */
2921 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2922 emit_queue ();
2923 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2924 }
2925 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2926 {
2927 /* For conditional expression, get safe form of the target. Then
2928 test the condition, doing the appropriate assignment on either
2929 side. This avoids the creation of unnecessary temporaries.
2930 For non-BLKmode, it is more efficient not to do this. */
2931
2932 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2933
2934 emit_queue ();
2935 target = protect_from_queue (target, 1);
2936
2937 do_pending_stack_adjust ();
2938 NO_DEFER_POP;
2939 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2940 store_expr (TREE_OPERAND (exp, 1), target, 0);
2941 emit_queue ();
2942 emit_jump_insn (gen_jump (lab2));
2943 emit_barrier ();
2944 emit_label (lab1);
2945 store_expr (TREE_OPERAND (exp, 2), target, 0);
2946 emit_queue ();
2947 emit_label (lab2);
2948 OK_DEFER_POP;
2949 return want_value ? target : NULL_RTX;
2950 }
2951 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2952 && GET_MODE (target) != BLKmode)
2953 /* If target is in memory and caller wants value in a register instead,
2954 arrange that. Pass TARGET as target for expand_expr so that,
2955 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2956 We know expand_expr will not use the target in that case.
2957 Don't do this if TARGET is volatile because we are supposed
2958 to write it and then read it. */
2959 {
2960 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2961 GET_MODE (target), 0);
2962 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2963 temp = copy_to_reg (temp);
2964 dont_return_target = 1;
2965 }
2966 else if (queued_subexp_p (target))
2967 /* If target contains a postincrement, let's not risk
2968 using it as the place to generate the rhs. */
2969 {
2970 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2971 {
2972 /* Expand EXP into a new pseudo. */
2973 temp = gen_reg_rtx (GET_MODE (target));
2974 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2975 }
2976 else
2977 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2978
2979 /* If target is volatile, ANSI requires accessing the value
2980 *from* the target, if it is accessed. So make that happen.
2981 In no case return the target itself. */
2982 if (! MEM_VOLATILE_P (target) && want_value)
2983 dont_return_target = 1;
2984 }
2985 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2986     /* If this is a scalar in a register that is stored in a wider mode
2987 than the declared mode, compute the result into its declared mode
2988 and then convert to the wider mode. Our value is the computed
2989 expression. */
2990 {
2991 /* If we don't want a value, we can do the conversion inside EXP,
2992 which will often result in some optimizations. Do the conversion
2993 in two steps: first change the signedness, if needed, then
2994 the extend. */
2995 if (! want_value)
2996 {
2997 if (TREE_UNSIGNED (TREE_TYPE (exp))
2998 != SUBREG_PROMOTED_UNSIGNED_P (target))
2999 exp
3000 = convert
3001 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3002 TREE_TYPE (exp)),
3003 exp);
3004
3005 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3006 SUBREG_PROMOTED_UNSIGNED_P (target)),
3007 exp);
3008 }
3009
3010 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3011
3012 /* If TEMP is a volatile MEM and we want a result value, make
3013 the access now so it gets done only once. Likewise if
3014 it contains TARGET. */
3015 if (GET_CODE (temp) == MEM && want_value
3016 && (MEM_VOLATILE_P (temp)
3017 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3018 temp = copy_to_reg (temp);
3019
3020 /* If TEMP is a VOIDmode constant, use convert_modes to make
3021 sure that we properly convert it. */
3022 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3023 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3024 TYPE_MODE (TREE_TYPE (exp)), temp,
3025 SUBREG_PROMOTED_UNSIGNED_P (target));
3026
3027 convert_move (SUBREG_REG (target), temp,
3028 SUBREG_PROMOTED_UNSIGNED_P (target));
3029 return want_value ? temp : NULL_RTX;
3030 }
3031 else
3032 {
3033 temp = expand_expr (exp, target, GET_MODE (target), 0);
3034 /* Return TARGET if it's a specified hardware register.
3035 If TARGET is a volatile mem ref, either return TARGET
3036 or return a reg copied *from* TARGET; ANSI requires this.
3037
3038 Otherwise, if TEMP is not TARGET, return TEMP
3039 if it is constant (for efficiency),
3040 or if we really want the correct value. */
3041 if (!(target && GET_CODE (target) == REG
3042 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3043 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3044 && temp != target
3045 && (CONSTANT_P (temp) || want_value))
3046 dont_return_target = 1;
3047 }
3048
3049 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3050 the same as that of TARGET, adjust the constant. This is needed, for
3051 example, in case it is a CONST_DOUBLE and we want only a word-sized
3052 value. */
3053 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3054 && TREE_CODE (exp) != ERROR_MARK
3055 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3056 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3057 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3058
3059 /* If value was not generated in the target, store it there.
3060      Convert the value to TARGET's type first if necessary.  */
3061
3062 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3063 {
3064 target = protect_from_queue (target, 1);
3065 if (GET_MODE (temp) != GET_MODE (target)
3066 && GET_MODE (temp) != VOIDmode)
3067 {
3068 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3069 if (dont_return_target)
3070 {
3071 /* In this case, we will return TEMP,
3072 so make sure it has the proper mode.
3073 But don't forget to store the value into TARGET. */
3074 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3075 emit_move_insn (target, temp);
3076 }
3077 else
3078 convert_move (target, temp, unsignedp);
3079 }
3080
3081 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3082 {
3083 /* Handle copying a string constant into an array.
3084 The string constant may be shorter than the array.
3085 So copy just the string's actual length, and clear the rest. */
3086 rtx size;
3087 rtx addr;
3088
3089 /* Get the size of the data type of the string,
3090 which is actually the size of the target. */
3091 size = expr_size (exp);
3092 if (GET_CODE (size) == CONST_INT
3093 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3094 emit_block_move (target, temp, size,
3095 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3096 else
3097 {
3098 /* Compute the size of the data to copy from the string. */
3099 tree copy_size
3100 = size_binop (MIN_EXPR,
3101 make_tree (sizetype, size),
3102 convert (sizetype,
3103 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3104 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3105 VOIDmode, 0);
3106 rtx label = 0;
3107
3108 /* Copy that much. */
3109 emit_block_move (target, temp, copy_size_rtx,
3110 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3111
3112 /* Figure out how much is left in TARGET that we have to clear.
3113 Do all calculations in ptr_mode. */
3114
3115 addr = XEXP (target, 0);
3116 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3117
3118 if (GET_CODE (copy_size_rtx) == CONST_INT)
3119 {
3120 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3121 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3122 }
3123 else
3124 {
3125 addr = force_reg (ptr_mode, addr);
3126 addr = expand_binop (ptr_mode, add_optab, addr,
3127 copy_size_rtx, NULL_RTX, 0,
3128 OPTAB_LIB_WIDEN);
3129
3130 size = expand_binop (ptr_mode, sub_optab, size,
3131 copy_size_rtx, NULL_RTX, 0,
3132 OPTAB_LIB_WIDEN);
3133
3134 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3135 GET_MODE (size), 0, 0);
3136 label = gen_label_rtx ();
3137 emit_jump_insn (gen_blt (label));
3138 }
3139
3140 if (size != const0_rtx)
3141 {
3142 #ifdef TARGET_MEM_FUNCTIONS
3143 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3144 addr, Pmode,
3145 const0_rtx, TYPE_MODE (integer_type_node),
3146 convert_to_mode (TYPE_MODE (sizetype),
3147 size,
3148 TREE_UNSIGNED (sizetype)),
3149 TYPE_MODE (sizetype));
3150 #else
3151 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3152 addr, Pmode,
3153 convert_to_mode (TYPE_MODE (integer_type_node),
3154 size,
3155 TREE_UNSIGNED (integer_type_node)),
3156 TYPE_MODE (integer_type_node));
3157 #endif
3158 }
3159
3160 if (label)
3161 emit_label (label);
3162 }
3163 }
3164 else if (GET_MODE (temp) == BLKmode)
3165 emit_block_move (target, temp, expr_size (exp),
3166 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3167 else
3168 emit_move_insn (target, temp);
3169 }
3170
3171 /* If we don't want a value, return NULL_RTX. */
3172 if (! want_value)
3173 return NULL_RTX;
3174
3175 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3176 ??? The latter test doesn't seem to make sense. */
3177 else if (dont_return_target && GET_CODE (temp) != MEM)
3178 return temp;
3179
3180 /* Return TARGET itself if it is a hard register. */
3181 else if (want_value && GET_MODE (target) != BLKmode
3182 && ! (GET_CODE (target) == REG
3183 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3184 return copy_to_reg (target);
3185
3186 else
3187 return target;
3188 }
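/* Example (a minimal sketch): a chained assignment `a = b = 0' can be
   expanded by calling store_expr on the inner assignment with
   WANT_VALUE nonzero and feeding the rtx it returns to the store for
   `a'; the outermost store passes WANT_VALUE == 0 and ignores the
   result.  */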
3189 \f
3190 /* Return 1 if EXP just contains zeros. */
3191
3192 static int
3193 is_zeros_p (exp)
3194 tree exp;
3195 {
3196 tree elt;
3197
3198 switch (TREE_CODE (exp))
3199 {
3200 case CONVERT_EXPR:
3201 case NOP_EXPR:
3202 case NON_LVALUE_EXPR:
3203 return is_zeros_p (TREE_OPERAND (exp, 0));
3204
3205 case INTEGER_CST:
3206 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3207
3208 case COMPLEX_CST:
3209 return
3210 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3211
3212 case REAL_CST:
3213 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3214
3215 case CONSTRUCTOR:
3216 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3217 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3218 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3219 if (! is_zeros_p (TREE_VALUE (elt)))
3220 return 0;
3221
3222 return 1;
3223 }
3224
3225 return 0;
3226 }
3227
3228 /* Return 1 if EXP contains mostly (3/4) zeros. */
3229
3230 static int
3231 mostly_zeros_p (exp)
3232 tree exp;
3233 {
3234 if (TREE_CODE (exp) == CONSTRUCTOR)
3235 {
3236 int elts = 0, zeros = 0;
3237 tree elt = CONSTRUCTOR_ELTS (exp);
3238 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3239 {
3240 /* If there are no ranges of true bits, it is all zero. */
3241 return elt == NULL_TREE;
3242 }
3243 for (; elt; elt = TREE_CHAIN (elt))
3244 {
3245 /* We do not handle the case where the index is a RANGE_EXPR,
3246 so the statistic will be somewhat inaccurate.
3247 We do make a more accurate count in store_constructor itself,
3248 	 and since this function is only used for nested array elements,
3249 this should be close enough. */
3250 if (mostly_zeros_p (TREE_VALUE (elt)))
3251 zeros++;
3252 elts++;
3253 }
3254
3255 return 4 * zeros >= 3 * elts;
3256 }
3257
3258 return is_zeros_p (exp);
3259 }
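/* For example, the constructor {0, 0, 0, 5} has ELTS == 4 and
   ZEROS == 3, and 4 * 3 >= 3 * 4 holds, so it is mostly zeros;
   {0, 0, 5, 5} is not, since 4 * 2 < 3 * 4.  */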
3260 \f
3261 /* Helper function for store_constructor.
3262 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3263 TYPE is the type of the CONSTRUCTOR, not the element type.
3264 CLEARED is as for store_constructor. */
3265
3266 static void
3267 store_constructor_field (target, bitsize, bitpos,
3268 mode, exp, type, cleared)
3269 rtx target;
3270 int bitsize, bitpos;
3271 enum machine_mode mode;
3272 tree exp, type;
3273 int cleared;
3274 {
3275 if (TREE_CODE (exp) == CONSTRUCTOR
3276 && (bitpos % BITS_PER_UNIT) == 0)
3277 {
3278 if (bitpos != 0)
3279 target = change_address (target, VOIDmode,
3280 plus_constant (XEXP (target, 0),
3281 bitpos / BITS_PER_UNIT));
3282 store_constructor (exp, target, cleared);
3283 }
3284 else
3285 store_field (target, bitsize, bitpos, mode, exp,
3286 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3287 int_size_in_bytes (type));
3288 }
3289
3290 /* Store the value of constructor EXP into the rtx TARGET.
3291 TARGET is either a REG or a MEM.
3292 CLEARED is true if TARGET is known to have been zero'd. */
3293
3294 static void
3295 store_constructor (exp, target, cleared)
3296 tree exp;
3297 rtx target;
3298 int cleared;
3299 {
3300 tree type = TREE_TYPE (exp);
3301
3302 /* We know our target cannot conflict, since safe_from_p has been called. */
3303 #if 0
3304 /* Don't try copying piece by piece into a hard register
3305 since that is vulnerable to being clobbered by EXP.
3306 Instead, construct in a pseudo register and then copy it all. */
3307 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3308 {
3309 rtx temp = gen_reg_rtx (GET_MODE (target));
3310 store_constructor (exp, temp, 0);
3311 emit_move_insn (target, temp);
3312 return;
3313 }
3314 #endif
3315
3316 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3317 || TREE_CODE (type) == QUAL_UNION_TYPE)
3318 {
3319 register tree elt;
3320
3321 /* Inform later passes that the whole union value is dead. */
3322 if (TREE_CODE (type) == UNION_TYPE
3323 || TREE_CODE (type) == QUAL_UNION_TYPE)
3324 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3325
3326 /* If we are building a static constructor into a register,
3327 set the initial value as zero so we can fold the value into
3328 a constant. But if more than one register is involved,
3329 this probably loses. */
3330 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3331 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3332 {
3333 if (! cleared)
3334 emit_move_insn (target, const0_rtx);
3335
3336 cleared = 1;
3337 }
3338
3339 /* If the constructor has fewer fields than the structure
3340 or if we are initializing the structure to mostly zeros,
3341 clear the whole structure first. */
3342 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3343 != list_length (TYPE_FIELDS (type)))
3344 || mostly_zeros_p (exp))
3345 {
3346 if (! cleared)
3347 clear_storage (target, expr_size (exp),
3348 TYPE_ALIGN (type) / BITS_PER_UNIT);
3349
3350 cleared = 1;
3351 }
3352 else
3353 /* Inform later passes that the old value is dead. */
3354 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3355
3356 /* Store each element of the constructor into
3357 the corresponding field of TARGET. */
3358
3359 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3360 {
3361 register tree field = TREE_PURPOSE (elt);
3362 register enum machine_mode mode;
3363 int bitsize;
3364 int bitpos = 0;
3365 int unsignedp;
3366 tree pos, constant = 0, offset = 0;
3367 rtx to_rtx = target;
3368
3369 /* Just ignore missing fields.
3370 We cleared the whole structure, above,
3371 if any fields are missing. */
3372 if (field == 0)
3373 continue;
3374
3375 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3376 continue;
3377
3378 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3379 unsignedp = TREE_UNSIGNED (field);
3380 mode = DECL_MODE (field);
3381 if (DECL_BIT_FIELD (field))
3382 mode = VOIDmode;
3383
3384 pos = DECL_FIELD_BITPOS (field);
3385 if (TREE_CODE (pos) == INTEGER_CST)
3386 constant = pos;
3387 else if (TREE_CODE (pos) == PLUS_EXPR
3388 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3389 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3390 else
3391 offset = pos;
3392
3393 if (constant)
3394 bitpos = TREE_INT_CST_LOW (constant);
3395
3396 if (offset)
3397 {
3398 rtx offset_rtx;
3399
3400 if (contains_placeholder_p (offset))
3401 offset = build (WITH_RECORD_EXPR, sizetype,
3402 offset, exp);
3403
3404 offset = size_binop (FLOOR_DIV_EXPR, offset,
3405 size_int (BITS_PER_UNIT));
3406
3407 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3408 if (GET_CODE (to_rtx) != MEM)
3409 abort ();
3410
3411 to_rtx
3412 = change_address (to_rtx, VOIDmode,
3413 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3414 force_reg (ptr_mode, offset_rtx)));
3415 }
3416 if (TREE_READONLY (field))
3417 {
3418 if (GET_CODE (to_rtx) == MEM)
3419 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3420 XEXP (to_rtx, 0));
3421 RTX_UNCHANGING_P (to_rtx) = 1;
3422 }
3423
3424 store_constructor_field (to_rtx, bitsize, bitpos,
3425 mode, TREE_VALUE (elt), type, cleared);
3426 }
3427 }
3428 else if (TREE_CODE (type) == ARRAY_TYPE)
3429 {
3430 register tree elt;
3431 register int i;
3432 int need_to_clear;
3433 tree domain = TYPE_DOMAIN (type);
3434 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3435 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3436 tree elttype = TREE_TYPE (type);
3437
3438 /* If the constructor has fewer elements than the array,
3439 	 clear the whole array first.  Similarly if this is a
3440 	 static constructor of a non-BLKmode object.  */
3441 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3442 need_to_clear = 1;
3443 else
3444 {
3445 HOST_WIDE_INT count = 0, zero_count = 0;
3446 need_to_clear = 0;
3447 /* This loop is a more accurate version of the loop in
3448 mostly_zeros_p (it handles RANGE_EXPR in an index).
3449 It is also needed to check for missing elements. */
3450 for (elt = CONSTRUCTOR_ELTS (exp);
3451 elt != NULL_TREE;
3452 	       elt = TREE_CHAIN (elt))
3453 {
3454 tree index = TREE_PURPOSE (elt);
3455 HOST_WIDE_INT this_node_count;
3456 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3457 {
3458 tree lo_index = TREE_OPERAND (index, 0);
3459 tree hi_index = TREE_OPERAND (index, 1);
3460 if (TREE_CODE (lo_index) != INTEGER_CST
3461 || TREE_CODE (hi_index) != INTEGER_CST)
3462 {
3463 need_to_clear = 1;
3464 break;
3465 }
3466 this_node_count = TREE_INT_CST_LOW (hi_index)
3467 - TREE_INT_CST_LOW (lo_index) + 1;
3468 }
3469 else
3470 this_node_count = 1;
3471 count += this_node_count;
3472 if (mostly_zeros_p (TREE_VALUE (elt)))
3473 zero_count += this_node_count;
3474 }
3475 if (4 * zero_count >= 3 * count)
3476 need_to_clear = 1;
3477 }
3478 if (need_to_clear)
3479 {
3480 if (! cleared)
3481 clear_storage (target, expr_size (exp),
3482 TYPE_ALIGN (type) / BITS_PER_UNIT);
3483 cleared = 1;
3484 }
3485 else
3486 /* Inform later passes that the old value is dead. */
3487 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3488
3489 /* Store each element of the constructor into
3490 the corresponding element of TARGET, determined
3491 by counting the elements. */
3492 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3493 elt;
3494 elt = TREE_CHAIN (elt), i++)
3495 {
3496 register enum machine_mode mode;
3497 int bitsize;
3498 int bitpos;
3499 int unsignedp;
3500 tree value = TREE_VALUE (elt);
3501 tree index = TREE_PURPOSE (elt);
3502 rtx xtarget = target;
3503
3504 if (cleared && is_zeros_p (value))
3505 continue;
3506
3507 mode = TYPE_MODE (elttype);
3508 bitsize = GET_MODE_BITSIZE (mode);
3509 unsignedp = TREE_UNSIGNED (elttype);
3510
3511 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3512 {
3513 tree lo_index = TREE_OPERAND (index, 0);
3514 tree hi_index = TREE_OPERAND (index, 1);
3515 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3516 struct nesting *loop;
3517 tree position;
3518
3519 if (TREE_CODE (lo_index) == INTEGER_CST
3520 && TREE_CODE (hi_index) == INTEGER_CST)
3521 {
3522 HOST_WIDE_INT lo = TREE_INT_CST_LOW (lo_index);
3523 HOST_WIDE_INT hi = TREE_INT_CST_LOW (hi_index);
3524 HOST_WIDE_INT count = hi - lo + 1;
3525
3526 /* If the range is constant and "small", unroll the loop.
3527 We must also use store_field if the target is not MEM. */
3528 if (GET_CODE (target) != MEM
3529 || count <= 2
3530 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3531 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3532 <= 40 * 8))
3533 {
3534 lo -= minelt; hi -= minelt;
3535 for (; lo <= hi; lo++)
3536 {
3537 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3538 store_constructor_field (target, bitsize, bitpos,
3539 mode, value, type, cleared);
3540 }
3541 }
3542 }
3543 else
3544 {
3545 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3546 loop_top = gen_label_rtx ();
3547 loop_end = gen_label_rtx ();
3548
3549 unsignedp = TREE_UNSIGNED (domain);
3550
3551 index = build_decl (VAR_DECL, NULL_TREE, domain);
3552
3553 DECL_RTL (index) = index_r
3554 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3555 &unsignedp, 0));
3556
3557 if (TREE_CODE (value) == SAVE_EXPR
3558 && SAVE_EXPR_RTL (value) == 0)
3559 {
3560 /* Make sure value gets expanded once before the loop. */
3561 expand_expr (value, const0_rtx, VOIDmode, 0);
3562 emit_queue ();
3563 }
3564 store_expr (lo_index, index_r, 0);
3565 loop = expand_start_loop (0);
3566
3567 /* Assign value to element index. */
3568 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3569 size_int (BITS_PER_UNIT));
3570 position = size_binop (MULT_EXPR,
3571 size_binop (MINUS_EXPR, index,
3572 TYPE_MIN_VALUE (domain)),
3573 position);
3574 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3575 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3576 xtarget = change_address (target, mode, addr);
3577 if (TREE_CODE (value) == CONSTRUCTOR)
3578 		      store_constructor (value, xtarget, cleared);
3579 else
3580 store_expr (value, xtarget, 0);
3581
3582 expand_exit_loop_if_false (loop,
3583 build (LT_EXPR, integer_type_node,
3584 index, hi_index));
3585
3586 expand_increment (build (PREINCREMENT_EXPR,
3587 TREE_TYPE (index),
3588 index, integer_one_node), 0);
3589 expand_end_loop ();
3590 emit_label (loop_end);
3591
3592 		 /* Needed by stupid register allocation, to extend the
3593 lifetime of pseudo-regs used by target past the end
3594 of the loop. */
3595 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3596 }
3597 }
3598 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3599 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3600 {
3601 rtx pos_rtx, addr;
3602 tree position;
3603
3604 if (index == 0)
3605 index = size_int (i);
3606
3607 if (minelt)
3608 index = size_binop (MINUS_EXPR, index,
3609 TYPE_MIN_VALUE (domain));
3610 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3611 size_int (BITS_PER_UNIT));
3612 position = size_binop (MULT_EXPR, index, position);
3613 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3614 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3615 xtarget = change_address (target, mode, addr);
3616 store_expr (value, xtarget, 0);
3617 }
3618 else
3619 {
3620 if (index != 0)
3621 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3622 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3623 else
3624 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3625 store_constructor_field (target, bitsize, bitpos,
3626 mode, value, type, cleared);
3627 }
3628 }
3629 }
3630 /* set constructor assignments */
3631 else if (TREE_CODE (type) == SET_TYPE)
3632 {
3633 tree elt = CONSTRUCTOR_ELTS (exp);
3634 rtx xtarget = XEXP (target, 0);
3635 int set_word_size = TYPE_ALIGN (type);
3636 int nbytes = int_size_in_bytes (type), nbits;
3637 tree domain = TYPE_DOMAIN (type);
3638 tree domain_min, domain_max, bitlength;
3639
3640 /* The default implementation strategy is to extract the constant
3641 parts of the constructor, use that to initialize the target,
3642 and then "or" in whatever non-constant ranges we need in addition.
3643
3644 If a large set is all zero or all ones, it is
3645 probably better to set it using memset (if available) or bzero.
3646 	 Also, if a large set has just a single range, it may be better
3647 	 to first clear the whole set (using bzero/memset) and then set
3648 	 the bits we want.  */
3649
3650 /* Check for all zeros. */
3651 if (elt == NULL_TREE)
3652 {
3653 if (!cleared)
3654 clear_storage (target, expr_size (exp),
3655 TYPE_ALIGN (type) / BITS_PER_UNIT);
3656 return;
3657 }
3658
3659 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3660 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3661 bitlength = size_binop (PLUS_EXPR,
3662 size_binop (MINUS_EXPR, domain_max, domain_min),
3663 size_one_node);
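/* E.g. a set over the domain 1..64 gives bitlength (64 - 1) + 1 = 64. */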
3664
3665 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3666 abort ();
3667 nbits = TREE_INT_CST_LOW (bitlength);
3668
3669 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3670 are "complicated" (more than one range), initialize (the
3671 constant parts) by copying from a constant. */
3672 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3673 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3674 {
3675 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3676 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3677 char *bit_buffer = (char*) alloca (nbits);
3678 HOST_WIDE_INT word = 0;
3679 int bit_pos = 0;
3680 int ibit = 0;
3681 int offset = 0; /* In bytes from beginning of set. */
3682 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
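/* Pack the bits into words, one word at a time. E.g. with a 32-bit
   set_word_size and bits 0, 1 and 5 set, the first word becomes 0x23
   when ! BYTES_BIG_ENDIAN, and the mirror image at the top of the
   word when BYTES_BIG_ENDIAN (values illustrative). */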
3683 for (;;)
3684 {
3685 if (bit_buffer[ibit])
3686 {
3687 if (BYTES_BIG_ENDIAN)
3688 word |= (1 << (set_word_size - 1 - bit_pos));
3689 else
3690 word |= 1 << bit_pos;
3691 }
3692 bit_pos++; ibit++;
3693 if (bit_pos >= set_word_size || ibit == nbits)
3694 {
3695 if (word != 0 || ! cleared)
3696 {
3697 rtx datum = GEN_INT (word);
3698 rtx to_rtx;
3699 /* Taking the address with XEXP (target, 0) is only valid when
3700 TARGET is a MEM; a non-MEM target must fit in one word (offset 0). */
3701 if (GET_CODE (target) == MEM)
3702 {
3703 to_rtx = plus_constant (XEXP (target, 0), offset);
3704 to_rtx = change_address (target, mode, to_rtx);
3705 }
3706 else if (offset == 0)
3707 to_rtx = target;
3708 else
3709 abort ();
3710 emit_move_insn (to_rtx, datum);
3711 }
3712 if (ibit == nbits)
3713 break;
3714 word = 0;
3715 bit_pos = 0;
3716 offset += set_word_size / BITS_PER_UNIT;
3717 }
3718 }
3719 }
3720 else if (!cleared)
3721 {
3722 /* Don't bother clearing storage if the set is all ones. */
3723 if (TREE_CHAIN (elt) != NULL_TREE
3724 || (TREE_PURPOSE (elt) == NULL_TREE
3725 ? nbits != 1
3726 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3727 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3728 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3729 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3730 != nbits))))
3731 clear_storage (target, expr_size (exp),
3732 TYPE_ALIGN (type) / BITS_PER_UNIT);
3733 }
3734
3735 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3736 {
3737 /* start of range of element or NULL */
3738 tree startbit = TREE_PURPOSE (elt);
3739 /* end of range of element, or element value */
3740 tree endbit = TREE_VALUE (elt);
3741 HOST_WIDE_INT startb, endb;
3742 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3743
3744 bitlength_rtx = expand_expr (bitlength,
3745 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3746
3747 /* handle non-range tuple element like [ expr ] */
3748 if (startbit == NULL_TREE)
3749 {
3750 startbit = save_expr (endbit);
3751 endbit = startbit;
3752 }
3753 startbit = convert (sizetype, startbit);
3754 endbit = convert (sizetype, endbit);
3755 if (! integer_zerop (domain_min))
3756 {
3757 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3758 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3759 }
3760 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3761 EXPAND_CONST_ADDRESS);
3762 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3763 EXPAND_CONST_ADDRESS);
3764
3765 if (REG_P (target))
3766 {
3767 targetx = assign_stack_temp (GET_MODE (target),
3768 GET_MODE_SIZE (GET_MODE (target)),
3769 0);
3770 emit_move_insn (targetx, target);
3771 }
3772 else if (GET_CODE (target) == MEM)
3773 targetx = target;
3774 else
3775 abort ();
3776
3777 #ifdef TARGET_MEM_FUNCTIONS
3778 /* Optimization: If startbit and endbit are
3779 constants divisible by BITS_PER_UNIT,
3780 call memset instead. */
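/* E.g. startbit 8 and endbit 23 cover exactly bytes 1 and 2 of the
   set, so this emits the equivalent of memset (addr + 1, -1, 2)
   (values illustrative). */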
3781 if (TREE_CODE (startbit) == INTEGER_CST
3782 && TREE_CODE (endbit) == INTEGER_CST
3783 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3784 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3785 {
3786 emit_library_call (memset_libfunc, 0,
3787 VOIDmode, 3,
3788 plus_constant (XEXP (targetx, 0),
3789 startb / BITS_PER_UNIT),
3790 Pmode,
3791 constm1_rtx, TYPE_MODE (integer_type_node),
3792 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3793 TYPE_MODE (sizetype));
3794 }
3795 else
3796 #endif
3797 {
3798 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3799 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3800 bitlength_rtx, TYPE_MODE (sizetype),
3801 startbit_rtx, TYPE_MODE (sizetype),
3802 endbit_rtx, TYPE_MODE (sizetype));
3803 }
3804 if (REG_P (target))
3805 emit_move_insn (target, targetx);
3806 }
3807 }
3808
3809 else
3810 abort ();
3811 }
3812
3813 /* Store the value of EXP (an expression tree)
3814 into a subfield of TARGET which has mode MODE and occupies
3815 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3816 If MODE is VOIDmode, it means that we are storing into a bit-field.
3817
3818 If VALUE_MODE is VOIDmode, return nothing in particular.
3819 UNSIGNEDP is not used in this case.
3820
3821 Otherwise, return an rtx for the value stored. This rtx
3822 has mode VALUE_MODE if that is convenient to do.
3823 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3824
3825 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3826 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
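/* A hypothetical call (arguments illustrative): storing EXP into a
   3-bit bit-field at bit 8 of a 4-byte structure in memory would be
   store_field (target, 3, 8, VOIDmode, exp, VOIDmode, 0, 4, 4). */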
3827
3828 static rtx
3829 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3830 unsignedp, align, total_size)
3831 rtx target;
3832 int bitsize, bitpos;
3833 enum machine_mode mode;
3834 tree exp;
3835 enum machine_mode value_mode;
3836 int unsignedp;
3837 int align;
3838 int total_size;
3839 {
3840 HOST_WIDE_INT width_mask = 0;
3841
3842 if (bitsize < HOST_BITS_PER_WIDE_INT)
3843 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
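/* E.g. a bitsize of 5 gives width_mask 0x1f; it is used below to
   recover the stored value without refetching the bit-field. */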
3844
3845 /* If we are storing into an unaligned field of an aligned union that is
3846 in a register, we may have the mode of TARGET being an integer mode but
3847 MODE == BLKmode. In that case, get an aligned object whose size and
3848 alignment are the same as TARGET and store TARGET into it (we can avoid
3849 the store if the field being stored is the entire width of TARGET). Then
3850 call ourselves recursively to store the field into a BLKmode version of
3851 that object. Finally, load from the object into TARGET. This is not
3852 very efficient in general, but should only be slightly more expensive
3853 than the otherwise-required unaligned accesses. Perhaps this can be
3854 cleaned up later. */
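/* E.g. (illustrative) storing into a BLKmode field of a union held in
   a DImode pseudo: spill the register to a stack temporary, store into
   a BLKmode view of that temporary, then reload the register. */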
3855
3856 if (mode == BLKmode
3857 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3858 {
3859 rtx object = assign_stack_temp (GET_MODE (target),
3860 GET_MODE_SIZE (GET_MODE (target)), 0);
3861 rtx blk_object = copy_rtx (object);
3862
3863 MEM_IN_STRUCT_P (object) = 1;
3864 MEM_IN_STRUCT_P (blk_object) = 1;
3865 PUT_MODE (blk_object, BLKmode);
3866
3867 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3868 emit_move_insn (object, target);
3869
3870 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3871 align, total_size);
3872
3873 /* Even though we aren't returning target, we need to
3874 give it the updated value. */
3875 emit_move_insn (target, object);
3876
3877 return blk_object;
3878 }
3879
3880 /* If the structure is in a register or if the component
3881 is a bit field, we cannot use addressing to access it.
3882 Use bit-field techniques or SUBREG to store in it. */
3883
3884 if (mode == VOIDmode
3885 || (mode != BLKmode && ! direct_store[(int) mode])
3886 || GET_CODE (target) == REG
3887 || GET_CODE (target) == SUBREG
3888 /* If the field isn't aligned enough to store as an ordinary memref,
3889 store it as a bit field. */
3890 || (SLOW_UNALIGNED_ACCESS
3891 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3892 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3893 {
3894 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3895
3896 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3897 MODE. */
3898 if (mode != VOIDmode && mode != BLKmode
3899 && mode != TYPE_MODE (TREE_TYPE (exp)))
3900 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3901
3902 /* Store the value in the bitfield. */
3903 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3904 if (value_mode != VOIDmode)
3905 {
3906 /* The caller wants an rtx for the value. */
3907 /* If possible, avoid refetching from the bitfield itself. */
3908 if (width_mask != 0
3909 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3910 {
3911 tree count;
3912 enum machine_mode tmode;
3913
3914 if (unsignedp)
3915 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
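/* For a signed field, sign-extend instead: shift the value left so
   the field's sign bit becomes the mode's sign bit, then shift it
   arithmetically back down. */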
3916 tmode = GET_MODE (temp);
3917 if (tmode == VOIDmode)
3918 tmode = value_mode;
3919 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3920 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3921 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3922 }
3923 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3924 NULL_RTX, value_mode, 0, align,
3925 total_size);
3926 }
3927 return const0_rtx;
3928 }
3929 else
3930 {
3931 rtx addr = XEXP (target, 0);
3932 rtx to_rtx;
3933
3934 /* If a value is wanted, it must be the lhs;
3935 so make the address stable for multiple use. */
3936
3937 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3938 && ! CONSTANT_ADDRESS_P (addr)
3939 /* A frame-pointer reference is already stable. */
3940 && ! (GET_CODE (addr) == PLUS
3941 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3942 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3943 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3944 addr = copy_to_reg (addr);
3945
3946 /* Now build a reference to just the desired component. */
3947
3948 to_rtx = change_address (target, mode,
3949 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3950 MEM_IN_STRUCT_P (to_rtx) = 1;
3951
3952 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3953 }
3954 }
3955 \f
3956 /* Return true if any object containing the innermost array is an unaligned
3957 packed structure field. */
3958
3959 static int
3960 get_inner_unaligned_p (exp)
3961 tree exp;
3962 {
3963 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3964
3965 while (1)
3966 {
3967 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3968 {
3969 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3970 < needed_alignment)
3971 return 1;
3972 }
3973 else if (TREE_CODE (exp) != ARRAY_REF
3974 && TREE_CODE (exp) != NON_LVALUE_EXPR
3975 && ! ((TREE_CODE (exp) == NOP_EXPR
3976 || TREE_CODE (exp) == CONVERT_EXPR)
3977 && (TYPE_MODE (TREE_TYPE (exp))
3978 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3979 break;
3980
3981 exp = TREE_OPERAND (exp, 0);
3982 }
3983
3984 return 0;
3985 }
3986
3987 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3988 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3989 ARRAY_REFs and find the ultimate containing object, which we return.
3990
3991 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3992 bit position, and *PUNSIGNEDP to the signedness of the field.
3993 If the position of the field is variable, we store a tree
3994 giving the variable offset (in units) in *POFFSET.
3995 This offset is in addition to the bit position.
3996 If the position is not variable, we store 0 in *POFFSET.
3997
3998 If any of the extraction expressions is volatile,
3999 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4000
4001 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4002 is a mode that can be used to access the field. In that case, *PBITSIZE
4003 is redundant.
4004
4005 If the field describes a variable-sized object, *PMODE is set to
4006 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4007 this case, but the address of the object can be found. */
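/* For example (values illustrative): for the reference s.b, where b is
   a 3-bit bit-field placed 37 bits into s, this returns the tree for s
   and sets *PBITSIZE to 3, *PBITPOS to 37, *POFFSET to 0, and *PMODE
   to VOIDmode since b is a bit-field. */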
4008
4009 tree
4010 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4011 punsignedp, pvolatilep)
4012 tree exp;
4013 int *pbitsize;
4014 int *pbitpos;
4015 tree *poffset;
4016 enum machine_mode *pmode;
4017 int *punsignedp;
4018 int *pvolatilep;
4019 {
4020 tree orig_exp = exp;
4021 tree size_tree = 0;
4022 enum machine_mode mode = VOIDmode;
4023 tree offset = integer_zero_node;
4024
4025 if (TREE_CODE (exp) == COMPONENT_REF)
4026 {
4027 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4028 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4029 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4030 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4031 }
4032 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4033 {
4034 size_tree = TREE_OPERAND (exp, 1);
4035 *punsignedp = TREE_UNSIGNED (exp);
4036 }
4037 else
4038 {
4039 mode = TYPE_MODE (TREE_TYPE (exp));
4040 *pbitsize = GET_MODE_BITSIZE (mode);
4041 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4042 }
4043
4044 if (size_tree)
4045 {
4046 if (TREE_CODE (size_tree) != INTEGER_CST)
4047 mode = BLKmode, *pbitsize = -1;
4048 else
4049 *pbitsize = TREE_INT_CST_LOW (size_tree);
4050 }
4051
4052 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4053 and find the ultimate containing object. */
4054
4055 *pbitpos = 0;
4056
4057 while (1)
4058 {
4059 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4060 {
4061 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4062 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4063 : TREE_OPERAND (exp, 2));
4064 tree constant = integer_zero_node, var = pos;
4065
4066 /* If this field hasn't been filled in yet, don't go
4067 past it. This should only happen when folding expressions
4068 made during type construction. */
4069 if (pos == 0)
4070 break;
4071
4072 /* Assume here that the offset is a multiple of a unit.
4073 If not, there should be an explicitly added constant. */
4074 if (TREE_CODE (pos) == PLUS_EXPR
4075 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4076 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4077 else if (TREE_CODE (pos) == INTEGER_CST)
4078 constant = pos, var = integer_zero_node;
4079
4080 *pbitpos += TREE_INT_CST_LOW (constant);
4081
4082 if (var)
4083 offset = size_binop (PLUS_EXPR, offset,
4084 size_binop (EXACT_DIV_EXPR, var,
4085 size_int (BITS_PER_UNIT)));
4086 }
4087
4088 else if (TREE_CODE (exp) == ARRAY_REF)
4089 {
4090 /* This code is based on the code in case ARRAY_REF in expand_expr
4091 below. We assume here that the size of an array element is
4092 always an integral multiple of BITS_PER_UNIT. */
4093
4094 tree index = TREE_OPERAND (exp, 1);
4095 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4096 tree low_bound
4097 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4098 tree index_type = TREE_TYPE (index);
4099
4100 if (! integer_zerop (low_bound))
4101 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4102
4103 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4104 {
4105 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4106 index);
4107 index_type = TREE_TYPE (index);
4108 }
4109
4110 index = fold (build (MULT_EXPR, index_type, index,
4111 TYPE_SIZE (TREE_TYPE (exp))));
4112
4113 if (TREE_CODE (index) == INTEGER_CST
4114 && TREE_INT_CST_HIGH (index) == 0)
4115 *pbitpos += TREE_INT_CST_LOW (index);
4116 else
4117 offset = size_binop (PLUS_EXPR, offset,
4118 size_binop (FLOOR_DIV_EXPR, index,
4119 size_int (BITS_PER_UNIT)));
4120 }
4121 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4122 && ! ((TREE_CODE (exp) == NOP_EXPR
4123 || TREE_CODE (exp) == CONVERT_EXPR)
4124 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4125 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4126 != UNION_TYPE))
4127 && (TYPE_MODE (TREE_TYPE (exp))
4128 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4129 break;
4130
4131 /* If any reference in the chain is volatile, the effect is volatile. */
4132 if (TREE_THIS_VOLATILE (exp))
4133 *pvolatilep = 1;
4134 exp = TREE_OPERAND (exp, 0);
4135 }
4136
4137 /* If this was a bit-field, see if there is a mode that allows direct
4138 access in case EXP is in memory. */
4139 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
4140 {
4141 mode = mode_for_size (*pbitsize, MODE_INT, 0);
4142 if (mode == BLKmode)
4143 mode = VOIDmode;
4144 }
4145
4146 if (integer_zerop (offset))
4147 offset = 0;
4148
4149 if (offset != 0 && contains_placeholder_p (offset))
4150 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4151
4152 *pmode = mode;
4153 *poffset = offset;
4154 return exp;
4155 }
4156 \f
4157 /* Given an rtx VALUE that may contain additions and multiplications,
4158 return an equivalent value that just refers to a register or memory.
4159 This is done by generating instructions to perform the arithmetic
4160 and returning a pseudo-register containing the value.
4161
4162 The returned value may be a REG, SUBREG, MEM or constant. */
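/* E.g. (illustrative) given (plus (reg 100) (mult (reg 101)
   (const_int 4))), this emits a multiply and an add and returns the
   pseudo register holding the final sum. */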
4163
4164 rtx
4165 force_operand (value, target)
4166 rtx value, target;
4167 {
4168 register optab binoptab = 0;
4169 /* Use a temporary to force order of execution of calls to
4170 `force_operand'. */
4171 rtx tmp;
4172 register rtx op2;
4173 /* Use subtarget as the target for operand 0 of a binary operation. */
4174 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4175
4176 if (GET_CODE (value) == PLUS)
4177 binoptab = add_optab;
4178 else if (GET_CODE (value) == MINUS)
4179 binoptab = sub_optab;
4180 else if (GET_CODE (value) == MULT)
4181 {
4182 op2 = XEXP (value, 1);
4183 if (!CONSTANT_P (op2)
4184 && !(GET_CODE (op2) == REG && op2 != subtarget))
4185 subtarget = 0;
4186 tmp = force_operand (XEXP (value, 0), subtarget);
4187 return expand_mult (GET_MODE (value), tmp,
4188 force_operand (op2, NULL_RTX),
4189 target, 0);
4190 }
4191
4192 if (binoptab)
4193 {
4194 op2 = XEXP (value, 1);
4195 if (!CONSTANT_P (op2)
4196 && !(GET_CODE (op2) == REG && op2 != subtarget))
4197 subtarget = 0;
4198 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4199 {
4200 binoptab = add_optab;
4201 op2 = negate_rtx (GET_MODE (value), op2);
4202 }
4203
4204 /* Check for an addition with OP2 a constant integer and our first
4205 operand a PLUS of a virtual register and something else. In that
4206 case, we want to emit the sum of the virtual register and the
4207 constant first and then add the other value. This allows virtual
4208 register instantiation to simply modify the constant rather than
4209 creating another one around this addition. */
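/* E.g. (plus (plus (reg virtual-stack-vars) (reg 100)) (const_int 8))
   is expanded as t = virtual-stack-vars + 8, then t + reg 100, so
   instantiation can fold the 8 into the frame offset (illustrative). */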
4210 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4211 && GET_CODE (XEXP (value, 0)) == PLUS
4212 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4213 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4214 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4215 {
4216 rtx temp = expand_binop (GET_MODE (value), binoptab,
4217 XEXP (XEXP (value, 0), 0), op2,
4218 subtarget, 0, OPTAB_LIB_WIDEN);
4219 return expand_binop (GET_MODE (value), binoptab, temp,
4220 force_operand (XEXP (XEXP (value, 0), 1), 0),
4221 target, 0, OPTAB_LIB_WIDEN);
4222 }
4223
4224 tmp = force_operand (XEXP (value, 0), subtarget);
4225 /* We give UNSIGNEDP = 0 to expand_binop because the only
4226 operations we are expanding here are signed ones. */
4227 return expand_binop (GET_MODE (value), binoptab, tmp,
4228 force_operand (op2, NULL_RTX),
4229 target, 0, OPTAB_LIB_WIDEN);
4230 }
4231 return value;
4232 }
4233 \f
4234 /* Subroutine of expand_expr:
4235 save the non-copied parts (LIST) of an expr (LHS), and return a list
4236 which can restore these values to their previous values,
4237 should something modify their storage. */
4238
4239 static tree
4240 save_noncopied_parts (lhs, list)
4241 tree lhs;
4242 tree list;
4243 {
4244 tree tail;
4245 tree parts = 0;
4246
4247 for (tail = list; tail; tail = TREE_CHAIN (tail))
4248 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4249 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4250 else
4251 {
4252 tree part = TREE_VALUE (tail);
4253 tree part_type = TREE_TYPE (part);
4254 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4255 rtx target = assign_temp (part_type, 0, 1, 1);
4256 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4257 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4258 parts = tree_cons (to_be_saved,
4259 build (RTL_EXPR, part_type, NULL_TREE,
4260 (tree) target),
4261 parts);
4262 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4263 }
4264 return parts;
4265 }
4266
4267 /* Subroutine of expand_expr:
4268 record the non-copied parts (LIST) of an expr (LHS), and return a list
4269 which specifies the initial values of these parts. */
4270
4271 static tree
4272 init_noncopied_parts (lhs, list)
4273 tree lhs;
4274 tree list;
4275 {
4276 tree tail;
4277 tree parts = 0;
4278
4279 for (tail = list; tail; tail = TREE_CHAIN (tail))
4280 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4281 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4282 else
4283 {
4284 tree part = TREE_VALUE (tail);
4285 tree part_type = TREE_TYPE (part);
4286 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4287 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4288 }
4289 return parts;
4290 }
4291
4292 /* Subroutine of expand_expr: return nonzero iff there is no way that
4293 EXP can reference X, which is being modified. */
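/* For instance, when expanding the right-hand side of an assignment,
   the result may be computed directly into TARGET only if
   safe_from_p (TARGET, rhs) holds; otherwise the partial store could
   clobber operands of RHS that are still to be read. */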
4294
4295 static int
4296 safe_from_p (x, exp)
4297 rtx x;
4298 tree exp;
4299 {
4300 rtx exp_rtl = 0;
4301 int i, nops;
4302
4303 if (x == 0
4304 /* If EXP has varying size, we MUST use a target since we currently
4305 have no way of allocating temporaries of variable size. So we
4306 assume here that something at a higher level has prevented a
4307 clash. This is somewhat bogus, but the best we can do. Only
4308 do this when X is BLKmode. */
4309 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4310 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4311 && GET_MODE (x) == BLKmode))
4312 return 1;
4313
4314 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4315 find the underlying pseudo. */
4316 if (GET_CODE (x) == SUBREG)
4317 {
4318 x = SUBREG_REG (x);
4319 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4320 return 0;
4321 }
4322
4323 /* If X is a location in the outgoing argument area, it is always safe. */
4324 if (GET_CODE (x) == MEM
4325 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4326 || (GET_CODE (XEXP (x, 0)) == PLUS
4327 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4328 return 1;
4329
4330 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4331 {
4332 case 'd':
4333 exp_rtl = DECL_RTL (exp);
4334 break;
4335
4336 case 'c':
4337 return 1;
4338
4339 case 'x':
4340 if (TREE_CODE (exp) == TREE_LIST)
4341 return ((TREE_VALUE (exp) == 0
4342 || safe_from_p (x, TREE_VALUE (exp)))
4343 && (TREE_CHAIN (exp) == 0
4344 || safe_from_p (x, TREE_CHAIN (exp))));
4345 else
4346 return 0;
4347
4348 case '1':
4349 return safe_from_p (x, TREE_OPERAND (exp, 0));
4350
4351 case '2':
4352 case '<':
4353 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4354 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4355
4356 case 'e':
4357 case 'r':
4358 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4359 the expression. If it is set, we conflict iff we are that rtx or
4360 both are in memory. Otherwise, we check all operands of the
4361 expression recursively. */
4362
4363 switch (TREE_CODE (exp))
4364 {
4365 case ADDR_EXPR:
4366 return (staticp (TREE_OPERAND (exp, 0))
4367 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4368
4369 case INDIRECT_REF:
4370 if (GET_CODE (x) == MEM)
4371 return 0;
4372 break;
4373
4374 case CALL_EXPR:
4375 exp_rtl = CALL_EXPR_RTL (exp);
4376 if (exp_rtl == 0)
4377 {
4378 /* Assume that the call will clobber all hard registers and
4379 all of memory. */
4380 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4381 || GET_CODE (x) == MEM)
4382 return 0;
4383 }
4384
4385 break;
4386
4387 case RTL_EXPR:
4388 /* If a sequence exists, we would have to scan every instruction
4389 in the sequence to see if it was safe. This is probably not
4390 worthwhile. */
4391 if (RTL_EXPR_SEQUENCE (exp))
4392 return 0;
4393
4394 exp_rtl = RTL_EXPR_RTL (exp);
4395 break;
4396
4397 case WITH_CLEANUP_EXPR:
4398 exp_rtl = RTL_EXPR_RTL (exp);
4399 break;
4400
4401 case CLEANUP_POINT_EXPR:
4402 return safe_from_p (x, TREE_OPERAND (exp, 0));
4403
4404 case SAVE_EXPR:
4405 exp_rtl = SAVE_EXPR_RTL (exp);
4406 break;
4407
4408 case BIND_EXPR:
4409 /* The only operand we look at is operand 1. The rest aren't
4410 part of the expression. */
4411 return safe_from_p (x, TREE_OPERAND (exp, 1));
4412
4413 case METHOD_CALL_EXPR:
4414 /* This takes an rtx argument, but shouldn't appear here. */
4415 abort ();
4416 }
4417
4418 /* If we have an rtx, we do not need to scan our operands. */
4419 if (exp_rtl)
4420 break;
4421
4422 nops = tree_code_length[(int) TREE_CODE (exp)];
4423 for (i = 0; i < nops; i++)
4424 if (TREE_OPERAND (exp, i) != 0
4425 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4426 return 0;
4427 }
4428
4429 /* If we have an rtl, find any enclosed object. Then see if we conflict
4430 with it. */
4431 if (exp_rtl)
4432 {
4433 if (GET_CODE (exp_rtl) == SUBREG)
4434 {
4435 exp_rtl = SUBREG_REG (exp_rtl);
4436 if (GET_CODE (exp_rtl) == REG
4437 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4438 return 0;
4439 }
4440
4441 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
4442 both are memory and EXP is not readonly. */
4443 return ! (rtx_equal_p (x, exp_rtl)
4444 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4445 && ! TREE_READONLY (exp)));
4446 }
4447
4448 /* If we reach here, it is safe. */
4449 return 1;
4450 }
4451
4452 /* Subroutine of expand_expr: return nonzero iff EXP is an
4453 expression whose type is statically determinable. */
4454
4455 static int
4456 fixed_type_p (exp)
4457 tree exp;
4458 {
4459 if (TREE_CODE (exp) == PARM_DECL
4460 || TREE_CODE (exp) == VAR_DECL
4461 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4462 || TREE_CODE (exp) == COMPONENT_REF
4463 || TREE_CODE (exp) == ARRAY_REF)
4464 return 1;
4465 return 0;
4466 }
4467 \f
4468 /* expand_expr: generate code for computing expression EXP.
4469 An rtx for the computed value is returned. The value is never null.
4470 In the case of a void EXP, const0_rtx is returned.
4471
4472 The value may be stored in TARGET if TARGET is nonzero.
4473 TARGET is just a suggestion; callers must assume that
4474 the rtx returned may not be the same as TARGET.
4475
4476 If TARGET is CONST0_RTX, it means that the value will be ignored.
4477
4478 If TMODE is not VOIDmode, it suggests generating the
4479 result in mode TMODE. But this is done only when convenient.
4480 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4481 TMODE is just a suggestion; callers must assume that
4482 the rtx returned may not have mode TMODE.
4483
4484 Note that TARGET may have neither TMODE nor MODE. In that case, it
4485 probably will not be used.
4486
4487 If MODIFIER is EXPAND_SUM then when EXP is an addition
4488 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4489 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4490 products as above, or REG or MEM, or constant.
4491 Ordinarily in such cases we would output mul or add instructions
4492 and then return a pseudo reg containing the sum.
4493
4494 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4495 it also marks a label as absolutely required (it can't be dead).
4496 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4497 This is used for outputting expressions used in initializers.
4498
4499 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4500 with a constant address even if that address is not normally legitimate.
4501 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
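/* E.g. with EXPAND_SUM, expanding a[i] may simply return
   (plus (reg <a's address>) (mult (reg <i>) (const_int 4)))
   rather than emitting the arithmetic (sizes illustrative). */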
4502
4503 rtx
4504 expand_expr (exp, target, tmode, modifier)
4505 register tree exp;
4506 rtx target;
4507 enum machine_mode tmode;
4508 enum expand_modifier modifier;
4509 {
4510 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4511 This is static so it will be accessible to our recursive callees. */
4512 static tree placeholder_list = 0;
4513 register rtx op0, op1, temp;
4514 tree type = TREE_TYPE (exp);
4515 int unsignedp = TREE_UNSIGNED (type);
4516 register enum machine_mode mode = TYPE_MODE (type);
4517 register enum tree_code code = TREE_CODE (exp);
4518 optab this_optab;
4519 /* Use subtarget as the target for operand 0 of a binary operation. */
4520 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4521 rtx original_target = target;
4522 /* Maybe defer this until we are sure we are not doing bytecode? */
4523 int ignore = (target == const0_rtx
4524 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4525 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4526 || code == COND_EXPR)
4527 && TREE_CODE (type) == VOID_TYPE));
4528 tree context;
4529
4530
4531 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4532 {
4533 bc_expand_expr (exp);
4534 return NULL;
4535 }
4536
4537 /* Don't use hard regs as subtargets, because the combiner
4538 can only handle pseudo regs. */
4539 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4540 subtarget = 0;
4541 /* Avoid subtargets inside loops,
4542 since they hide some invariant expressions. */
4543 if (preserve_subexpressions_p ())
4544 subtarget = 0;
4545
4546 /* If we are going to ignore this result, we need only do something
4547 if there is a side-effect somewhere in the expression. If there
4548 is, short-circuit the most common cases here. Note that we must
4549 not call expand_expr with anything but const0_rtx in case this
4550 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4551
4552 if (ignore)
4553 {
4554 if (! TREE_SIDE_EFFECTS (exp))
4555 return const0_rtx;
4556
4557 /* Ensure we reference a volatile object even if value is ignored. */
4558 if (TREE_THIS_VOLATILE (exp)
4559 && TREE_CODE (exp) != FUNCTION_DECL
4560 && mode != VOIDmode && mode != BLKmode)
4561 {
4562 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4563 if (GET_CODE (temp) == MEM)
4564 temp = copy_to_reg (temp);
4565 return const0_rtx;
4566 }
4567
4568 if (TREE_CODE_CLASS (code) == '1')
4569 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4570 VOIDmode, modifier);
4571 else if (TREE_CODE_CLASS (code) == '2'
4572 || TREE_CODE_CLASS (code) == '<')
4573 {
4574 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4575 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4576 return const0_rtx;
4577 }
4578 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4579 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4580 /* If the second operand has no side effects, just evaluate
4581 the first. */
4582 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4583 VOIDmode, modifier);
4584
4585 target = 0;
4586 }
4587
4588 /* If will do cse, generate all results into pseudo registers
4589 since 1) that allows cse to find more things
4590 and 2) otherwise cse could produce an insn the machine
4591 cannot support. */
4592
4593 if (! cse_not_expected && mode != BLKmode && target
4594 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4595 target = subtarget;
4596
4597 switch (code)
4598 {
4599 case LABEL_DECL:
4600 {
4601 tree function = decl_function_context (exp);
4602 /* Handle using a label in a containing function. */
4603 if (function != current_function_decl && function != 0)
4604 {
4605 struct function *p = find_function_data (function);
4606 /* Allocate in the memory associated with the function
4607 that the label is in. */
4608 push_obstacks (p->function_obstack,
4609 p->function_maybepermanent_obstack);
4610
4611 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4612 label_rtx (exp), p->forced_labels);
4613 pop_obstacks ();
4614 }
4615 else if (modifier == EXPAND_INITIALIZER)
4616 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4617 label_rtx (exp), forced_labels);
4618 temp = gen_rtx (MEM, FUNCTION_MODE,
4619 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4620 if (function != current_function_decl && function != 0)
4621 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4622 return temp;
4623 }
4624
4625 case PARM_DECL:
4626 if (DECL_RTL (exp) == 0)
4627 {
4628 error_with_decl (exp, "prior parameter's size depends on `%s'");
4629 return CONST0_RTX (mode);
4630 }
4631
4632 /* ... fall through ... */
4633
4634 case VAR_DECL:
4635 /* If a static var's type was incomplete when the decl was written,
4636 but the type is complete now, lay out the decl now. */
4637 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4638 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4639 {
4640 push_obstacks_nochange ();
4641 end_temporary_allocation ();
4642 layout_decl (exp, 0);
4643 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4644 pop_obstacks ();
4645 }
4646
4647 /* ... fall through ... */
4648
4649 case FUNCTION_DECL:
4650 case RESULT_DECL:
4651 if (DECL_RTL (exp) == 0)
4652 abort ();
4653
4654 /* Ensure the variable is marked as used even if it doesn't go
4655 through a parser. If it hasn't been used yet, write out an
4656 external definition. */
4657 if (! TREE_USED (exp))
4658 {
4659 assemble_external (exp);
4660 TREE_USED (exp) = 1;
4661 }
4662
4663 /* Show we haven't gotten RTL for this yet. */
4664 temp = 0;
4665
4666 /* Handle variables inherited from containing functions. */
4667 context = decl_function_context (exp);
4668
4669 /* We treat inline_function_decl as an alias for the current function
4670 because that is the inline function whose vars, types, etc.
4671 are being merged into the current function.
4672 See expand_inline_function. */
4673
4674 if (context != 0 && context != current_function_decl
4675 && context != inline_function_decl
4676 /* If var is static, we don't need a static chain to access it. */
4677 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4678 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4679 {
4680 rtx addr;
4681
4682 /* Mark as non-local and addressable. */
4683 DECL_NONLOCAL (exp) = 1;
4684 mark_addressable (exp);
4685 if (GET_CODE (DECL_RTL (exp)) != MEM)
4686 abort ();
4687 addr = XEXP (DECL_RTL (exp), 0);
4688 if (GET_CODE (addr) == MEM)
4689 addr = gen_rtx (MEM, Pmode,
4690 fix_lexical_addr (XEXP (addr, 0), exp));
4691 else
4692 addr = fix_lexical_addr (addr, exp);
4693 temp = change_address (DECL_RTL (exp), mode, addr);
4694 }
4695
4696 /* This is the case of an array whose size is to be determined
4697 from its initializer, while the initializer is still being parsed.
4698 See expand_decl. */
4699
4700 else if (GET_CODE (DECL_RTL (exp)) == MEM
4701 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4702 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4703 XEXP (DECL_RTL (exp), 0));
4704
4705 /* If DECL_RTL is memory, we are in the normal case and either
4706 the address is not valid or it is not a register and -fforce-addr
4707 is specified, get the address into a register. */
4708
4709 else if (GET_CODE (DECL_RTL (exp)) == MEM
4710 && modifier != EXPAND_CONST_ADDRESS
4711 && modifier != EXPAND_SUM
4712 && modifier != EXPAND_INITIALIZER
4713 && (! memory_address_p (DECL_MODE (exp),
4714 XEXP (DECL_RTL (exp), 0))
4715 || (flag_force_addr
4716 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4717 temp = change_address (DECL_RTL (exp), VOIDmode,
4718 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4719
4720 /* If we got something, return it. But first, set the alignment
4721 if the address is a register. */
4722 if (temp != 0)
4723 {
4724 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4725 mark_reg_pointer (XEXP (temp, 0),
4726 DECL_ALIGN (exp) / BITS_PER_UNIT);
4727
4728 return temp;
4729 }
4730
4731 /* If the mode of DECL_RTL does not match that of the decl, it
4732 must be a promoted value. We return a SUBREG of the wanted mode,
4733 but mark it so that we know that it was already extended. */
4734
4735 if (GET_CODE (DECL_RTL (exp)) == REG
4736 && GET_MODE (DECL_RTL (exp)) != mode)
4737 {
4738 /* Get the signedness used for this variable. Ensure we get the
4739 same mode we got when the variable was declared. */
4740 if (GET_MODE (DECL_RTL (exp))
4741 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4742 abort ();
4743
4744 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4745 SUBREG_PROMOTED_VAR_P (temp) = 1;
4746 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4747 return temp;
4748 }
4749
4750 return DECL_RTL (exp);
4751
4752 case INTEGER_CST:
4753 return immed_double_const (TREE_INT_CST_LOW (exp),
4754 TREE_INT_CST_HIGH (exp),
4755 mode);
4756
4757 case CONST_DECL:
4758 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4759
4760 case REAL_CST:
4761 /* If optimized, generate immediate CONST_DOUBLE
4762 which will be turned into memory by reload if necessary.
4763
4764 We used to force a register so that loop.c could see it. But
4765 this does not allow gen_* patterns to perform optimizations with
4766 the constants. It also produces two insns in cases like "x = 1.0;".
4767 On most machines, floating-point constants are not permitted in
4768 many insns, so we'd end up copying it to a register in any case.
4769
4770 Now, we do the copying in expand_binop, if appropriate. */
4771 return immed_real_const (exp);
4772
4773 case COMPLEX_CST:
4774 case STRING_CST:
4775 if (! TREE_CST_RTL (exp))
4776 output_constant_def (exp);
4777
4778 /* TREE_CST_RTL probably contains a constant address.
4779 On RISC machines where a constant address isn't valid,
4780 make some insns to get that address into a register. */
4781 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4782 && modifier != EXPAND_CONST_ADDRESS
4783 && modifier != EXPAND_INITIALIZER
4784 && modifier != EXPAND_SUM
4785 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4786 || (flag_force_addr
4787 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4788 return change_address (TREE_CST_RTL (exp), VOIDmode,
4789 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4790 return TREE_CST_RTL (exp);
4791
4792 case SAVE_EXPR:
4793 context = decl_function_context (exp);
4794
4795 /* We treat inline_function_decl as an alias for the current function
4796 because that is the inline function whose vars, types, etc.
4797 are being merged into the current function.
4798 See expand_inline_function. */
4799 if (context == current_function_decl || context == inline_function_decl)
4800 context = 0;
4801
4802 /* If this is non-local, handle it. */
4803 if (context)
4804 {
4805 temp = SAVE_EXPR_RTL (exp);
4806 if (temp && GET_CODE (temp) == REG)
4807 {
4808 put_var_into_stack (exp);
4809 temp = SAVE_EXPR_RTL (exp);
4810 }
4811 if (temp == 0 || GET_CODE (temp) != MEM)
4812 abort ();
4813 return change_address (temp, mode,
4814 fix_lexical_addr (XEXP (temp, 0), exp));
4815 }
4816 if (SAVE_EXPR_RTL (exp) == 0)
4817 {
4818 if (mode == VOIDmode)
4819 temp = const0_rtx;
4820 else
4821 temp = assign_temp (type, 0, 0, 0);
4822
4823 SAVE_EXPR_RTL (exp) = temp;
4824 if (!optimize && GET_CODE (temp) == REG)
4825 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4826 save_expr_regs);
4827
4828 /* If the mode of TEMP does not match that of the expression, it
4829 must be a promoted value. We pass store_expr a SUBREG of the
4830 wanted mode but mark it so that we know that it was already
4831 extended. Note that `unsignedp' was modified above in
4832 this case. */
4833
4834 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4835 {
4836 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4837 SUBREG_PROMOTED_VAR_P (temp) = 1;
4838 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4839 }
4840
4841 if (temp == const0_rtx)
4842 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4843 else
4844 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4845 }
4846
4847 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4848 must be a promoted value. We return a SUBREG of the wanted mode,
4849 but mark it so that we know that it was already extended. */
4850
4851 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4852 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4853 {
4854 /* Compute the signedness and make the proper SUBREG. */
4855 promote_mode (type, mode, &unsignedp, 0);
4856 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4857 SUBREG_PROMOTED_VAR_P (temp) = 1;
4858 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4859 return temp;
4860 }
4861
4862 return SAVE_EXPR_RTL (exp);
4863
4864 case PLACEHOLDER_EXPR:
4865 /* If there is an object on the head of the placeholder list,
4866 see if some object in its references is of type TYPE. For
4867 further information, see tree.def. */
4868 if (placeholder_list)
4869 {
4870 tree object;
4871 tree old_list = placeholder_list;
4872
4873 for (object = TREE_PURPOSE (placeholder_list);
4874 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4875 != TYPE_MAIN_VARIANT (type))
4876 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4877 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4878 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4879 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4880 object = TREE_OPERAND (object, 0))
4881 ;
4882
4883 if (object != 0
4884 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4885 == TYPE_MAIN_VARIANT (type)))
4886 {
4887 /* Expand this object skipping the list entries before
4888 it was found in case it is also a PLACEHOLDER_EXPR.
4889 In that case, we want to translate it using subsequent
4890 entries. */
4891 placeholder_list = TREE_CHAIN (placeholder_list);
4892 temp = expand_expr (object, original_target, tmode, modifier);
4893 placeholder_list = old_list;
4894 return temp;
4895 }
4896 }
4897
4898 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4899 abort ();
4900
4901 case WITH_RECORD_EXPR:
4902 /* Put the object on the placeholder list, expand our first operand,
4903 and pop the list. */
4904 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4905 placeholder_list);
4906 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4907 tmode, modifier);
4908 placeholder_list = TREE_CHAIN (placeholder_list);
4909 return target;
4910
4911 case EXIT_EXPR:
4912 expand_exit_loop_if_false (NULL_PTR,
4913 invert_truthvalue (TREE_OPERAND (exp, 0)));
4914 return const0_rtx;
4915
4916 case LOOP_EXPR:
4917 push_temp_slots ();
4918 expand_start_loop (1);
4919 expand_expr_stmt (TREE_OPERAND (exp, 0));
4920 expand_end_loop ();
4921 pop_temp_slots ();
4922
4923 return const0_rtx;
4924
4925 case BIND_EXPR:
4926 {
4927 tree vars = TREE_OPERAND (exp, 0);
4928 int vars_need_expansion = 0;
4929
4930 /* Need to open a binding contour here because
4931 if there are any cleanups they must be contained here. */
4932 expand_start_bindings (0);
4933
4934 /* Mark the corresponding BLOCK for output in its proper place. */
4935 if (TREE_OPERAND (exp, 2) != 0
4936 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4937 insert_block (TREE_OPERAND (exp, 2));
4938
4939 /* If VARS have not yet been expanded, expand them now. */
4940 while (vars)
4941 {
4942 if (DECL_RTL (vars) == 0)
4943 {
4944 vars_need_expansion = 1;
4945 expand_decl (vars);
4946 }
4947 expand_decl_init (vars);
4948 vars = TREE_CHAIN (vars);
4949 }
4950
4951 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4952
4953 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4954
4955 return temp;
4956 }
4957
4958 case RTL_EXPR:
4959 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4960 abort ();
4961 emit_insns (RTL_EXPR_SEQUENCE (exp));
4962 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4963 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4964 free_temps_for_rtl_expr (exp);
4965 return RTL_EXPR_RTL (exp);
4966
4967 case CONSTRUCTOR:
4968 /* If we don't need the result, just ensure we evaluate any
4969 subexpressions. */
4970 if (ignore)
4971 {
4972 tree elt;
4973 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4974 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4975 return const0_rtx;
4976 }
4977
4978 /* All elts simple constants => refer to a constant in memory. But
4979 if this is a non-BLKmode mode, let it store a field at a time
4980 since that should make a CONST_INT or CONST_DOUBLE when we
4981 fold. Likewise, if we have a target we can use, it is best to
4982 store directly into the target unless the type is large enough
4983 that memcpy will be used. If we are making an initializer and
4984 all operands are constant, put it in memory as well. */
4985 else if ((TREE_STATIC (exp)
4986 && ((mode == BLKmode
4987 && ! (target != 0 && safe_from_p (target, exp)))
4988 || TREE_ADDRESSABLE (exp)
4989 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4990 && (move_by_pieces_ninsns
4991 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4992 TYPE_ALIGN (type) / BITS_PER_UNIT)
4993 > MOVE_RATIO)
4994 && ! mostly_zeros_p (exp))))
4995 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4996 {
4997 rtx constructor = output_constant_def (exp);
4998 if (modifier != EXPAND_CONST_ADDRESS
4999 && modifier != EXPAND_INITIALIZER
5000 && modifier != EXPAND_SUM
5001 && (! memory_address_p (GET_MODE (constructor),
5002 XEXP (constructor, 0))
5003 || (flag_force_addr
5004 && GET_CODE (XEXP (constructor, 0)) != REG)))
5005 constructor = change_address (constructor, VOIDmode,
5006 XEXP (constructor, 0));
5007 return constructor;
5008 }
5009
5010 else
5011 {
5012 if (target == 0 || ! safe_from_p (target, exp))
5013 {
5014 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5015 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5016 else
5017 target = assign_temp (type, 0, 1, 1);
5018 }
5019
5020 if (TREE_READONLY (exp))
5021 {
5022 if (GET_CODE (target) == MEM)
5023 target = change_address (target, GET_MODE (target),
5024 XEXP (target, 0));
5025 RTX_UNCHANGING_P (target) = 1;
5026 }
5027
5028 store_constructor (exp, target, 0);
5029 return target;
5030 }
5031
5032 case INDIRECT_REF:
5033 {
5034 tree exp1 = TREE_OPERAND (exp, 0);
5035 tree exp2;
5036
5037 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
5038 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
5039 This code has the same general effect as simply doing
5040 expand_expr on the save expr, except that the expression PTR
5041 is computed for use as a memory address. This means different
5042 code, suitable for indexing, may be generated. */
5043 if (TREE_CODE (exp1) == SAVE_EXPR
5044 && SAVE_EXPR_RTL (exp1) == 0
5045 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
5046 {
5047 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
5048 VOIDmode, EXPAND_SUM);
5049 op0 = memory_address (mode, temp);
5050 op0 = copy_all_regs (op0);
5051 SAVE_EXPR_RTL (exp1) = op0;
5052 }
5053 else
5054 {
5055 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5056 op0 = memory_address (mode, op0);
5057 }
5058
5059 temp = gen_rtx (MEM, mode, op0);
5060 /* If address was computed by addition,
5061 mark this as an element of an aggregate. */
5062 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5063 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5064 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5065 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5066 || (TREE_CODE (exp1) == ADDR_EXPR
5067 && (exp2 = TREE_OPERAND (exp1, 0))
5068 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5069 MEM_IN_STRUCT_P (temp) = 1;
5070 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5071
5072 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5073 here, because, in C and C++, the fact that a location is accessed
5074 through a pointer to const does not mean that the value there can
5075 never change. Languages where it can never change should
5076 also set TREE_STATIC. */
5077 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5078 return temp;
5079 }
5080
5081 case ARRAY_REF:
5082 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5083 abort ();
5084
5085 {
5086 tree array = TREE_OPERAND (exp, 0);
5087 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5088 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5089 tree index = TREE_OPERAND (exp, 1);
5090 tree index_type = TREE_TYPE (index);
5091 int i;
5092
5093 if (TREE_CODE (low_bound) != INTEGER_CST
5094 && contains_placeholder_p (low_bound))
5095 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5096
5097 /* Optimize the special-case of a zero lower bound.
5098
5099 We convert the low_bound to sizetype to avoid some problems
5100 with constant folding. (E.g. suppose the lower bound is 1,
5101 and its mode is QI. Without the conversion, (ARRAY
5102 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5103 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5104
5105 But sizetype isn't quite right either (especially if
5106 the lowbound is negative). FIXME */
5107
5108 if (! integer_zerop (low_bound))
5109 index = fold (build (MINUS_EXPR, index_type, index,
5110 convert (sizetype, low_bound)));
5111
5112 if ((TREE_CODE (index) != INTEGER_CST
5113 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5114 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5115 {
5116 /* Nonconstant array index or nonconstant element size, and
5117 not an array in an unaligned (packed) structure field.
5118 Generate the tree for *(&array+index) and expand that,
5119 except do it in a language-independent way
5120 and don't complain about non-lvalue arrays.
5121 `mark_addressable' should already have been called
5122 for any array for which this case will be reached. */
5123
5124 /* Don't forget the const or volatile flag from the array
5125 element. */
5126 tree variant_type = build_type_variant (type,
5127 TREE_READONLY (exp),
5128 TREE_THIS_VOLATILE (exp));
5129 tree array_adr = build1 (ADDR_EXPR,
5130 build_pointer_type (variant_type), array);
5131 tree elt;
5132 tree size = size_in_bytes (type);
5133
5134 /* Convert the integer argument to a type the same size as sizetype
5135 so the multiply won't overflow spuriously. */
5136 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5137 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5138 index);
5139
5140 if (TREE_CODE (size) != INTEGER_CST
5141 && contains_placeholder_p (size))
5142 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5143
5144 /* Don't think the address has side effects
5145 just because the array does.
5146 (In some cases the address might have side effects,
5147 and we fail to record that fact here. However, it should not
5148 matter, since expand_expr should not care.) */
5149 TREE_SIDE_EFFECTS (array_adr) = 0;
5150
5151 elt
5152 = build1
5153 (INDIRECT_REF, type,
5154 fold (build (PLUS_EXPR,
5155 TYPE_POINTER_TO (variant_type),
5156 array_adr,
5157 fold
5158 (build1
5159 (NOP_EXPR,
5160 TYPE_POINTER_TO (variant_type),
5161 fold (build (MULT_EXPR, TREE_TYPE (index),
5162 index,
5163 convert (TREE_TYPE (index),
5164 size))))))));
5165
5166 /* Volatility, etc., of new expression is same as old
5167 expression. */
5168 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5169 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5170 TREE_READONLY (elt) = TREE_READONLY (exp);
5171
5172 return expand_expr (elt, target, tmode, modifier);
5173 }
5174
5175 /* Fold an expression like: "foo"[2].
5176 This is not done in fold so it won't happen inside &.
5177 Don't fold if this is for wide characters since it's too
5178 difficult to do correctly and this is a very rare case. */
5179
5180 if (TREE_CODE (array) == STRING_CST
5181 && TREE_CODE (index) == INTEGER_CST
5182 && !TREE_INT_CST_HIGH (index)
5183 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5184 && GET_MODE_CLASS (mode) == MODE_INT
5185 && GET_MODE_SIZE (mode) == 1)
5186 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5187
5188 /* If this is a constant index into a constant array,
5189 just get the value from the array. Handle both the cases when
5190 we have an explicit constructor and when our operand is a variable
5191 that was declared const. */
5192
5193 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5194 {
5195 if (TREE_CODE (index) == INTEGER_CST
5196 && TREE_INT_CST_HIGH (index) == 0)
5197 {
5198 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5199
5200 i = TREE_INT_CST_LOW (index);
5201 while (elem && i--)
5202 elem = TREE_CHAIN (elem);
5203 if (elem)
5204 return expand_expr (fold (TREE_VALUE (elem)), target,
5205 tmode, modifier);
5206 }
5207 }
5208
5209 else if (optimize >= 1
5210 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5211 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5212 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5213 {
5214 if (TREE_CODE (index) == INTEGER_CST
5215 && TREE_INT_CST_HIGH (index) == 0)
5216 {
5217 tree init = DECL_INITIAL (array);
5218
5219 i = TREE_INT_CST_LOW (index);
5220 if (TREE_CODE (init) == CONSTRUCTOR)
5221 {
5222 tree elem = CONSTRUCTOR_ELTS (init);
5223
5224 while (elem
5225 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5226 elem = TREE_CHAIN (elem);
5227 if (elem)
5228 return expand_expr (fold (TREE_VALUE (elem)), target,
5229 tmode, modifier);
5230 }
5231 else if (TREE_CODE (init) == STRING_CST
5232 && i < TREE_STRING_LENGTH (init))
5233 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5234 }
5235 }
5236 }
5237
5238 /* Treat array-ref with constant index as a component-ref. */
5239
5240 case COMPONENT_REF:
5241 case BIT_FIELD_REF:
5242 /* If the operand is a CONSTRUCTOR, we can just extract the
5243 appropriate field if it is present. Don't do this if we have
5244 already written the data since we want to refer to that copy
5245 and varasm.c assumes that's what we'll do. */
5246 if (code != ARRAY_REF
5247 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5248 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5249 {
5250 tree elt;
5251
5252 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5253 elt = TREE_CHAIN (elt))
5254 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5255 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5256 }
5257
5258 {
5259 enum machine_mode mode1;
5260 int bitsize;
5261 int bitpos;
5262 tree offset;
5263 int volatilep = 0;
5264 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5265 &mode1, &unsignedp, &volatilep);
5266 int alignment;
5267
5268 /* If we got back the original object, something is wrong. Perhaps
5269 we are evaluating an expression too early. In any event, don't
5270 infinitely recurse. */
5271 if (tem == exp)
5272 abort ();
5273
5274 /* If TEM's type is a union of variable size, pass TARGET to the
5275 inner computation, since it will need a temporary and TARGET is
5276 known to be usable as one. This occurs in unchecked conversion in Ada. */
5277
5278 op0 = expand_expr (tem,
5279 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5280 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5281 != INTEGER_CST)
5282 ? target : NULL_RTX),
5283 VOIDmode,
5284 modifier == EXPAND_INITIALIZER ? modifier : 0);
5285
5286 /* If this is a constant, put it into a register if it is a
5287 legitimate constant and memory if it isn't. */
5288 if (CONSTANT_P (op0))
5289 {
5290 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5291 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5292 op0 = force_reg (mode, op0);
5293 else
5294 op0 = validize_mem (force_const_mem (mode, op0));
5295 }
5296
5297 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5298 if (offset != 0)
5299 {
5300 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5301
5302 if (GET_CODE (op0) != MEM)
5303 abort ();
5304 op0 = change_address (op0, VOIDmode,
5305 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5306 force_reg (ptr_mode, offset_rtx)));
5307 /* If we have a variable offset, the known alignment
5308 is only that of the innermost structure containing the field.
5309 (Actually, we could sometimes do better by using the
5310 size of an element of the innermost array, but no need.) */
5311 if (TREE_CODE (exp) == COMPONENT_REF
5312 || TREE_CODE (exp) == BIT_FIELD_REF)
5313 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5314 / BITS_PER_UNIT);
5315 }
5316
5317 /* Don't forget about volatility even if this is a bitfield. */
5318 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5319 {
5320 op0 = copy_rtx (op0);
5321 MEM_VOLATILE_P (op0) = 1;
5322 }
5323
5324 /* In cases where an aligned union has an unaligned object
5325 as a field, we might be extracting a BLKmode value from
5326 an integer-mode (e.g., SImode) object. Handle this case
5327 by doing the extract into an object as wide as the field
5328 (which we know to be the width of a basic mode), then
5329 storing into memory, and changing the mode to BLKmode. */
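/* Illustrative sketch (editor's note): on a strict-alignment target,

       union u { int i; struct { char c[4]; } s; } x;

   may leave x in an SImode register while x.s is a 32-bit BLKmode
   value.  Here bitsize is 32, so ext_mode becomes SImode: the bits
   are extracted into an SImode temporary, stored to a stack slot,
   and the slot is then relabeled BLKmode.  */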
5330 if (mode1 == VOIDmode
5331 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5332 || (modifier != EXPAND_CONST_ADDRESS
5333 && modifier != EXPAND_SUM
5334 && modifier != EXPAND_INITIALIZER
5335 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5336 /* If the field isn't aligned enough to fetch as a memref,
5337 fetch it as a bit field. */
5338 || (SLOW_UNALIGNED_ACCESS
5339 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5340 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5341 {
5342 enum machine_mode ext_mode = mode;
5343
5344 if (ext_mode == BLKmode)
5345 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5346
5347 if (ext_mode == BLKmode)
5348 abort ();
5349
5350 op0 = validize_mem (op0);
5351
5352 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5353 mark_reg_pointer (XEXP (op0, 0), alignment);
5354
5355 op0 = extract_bit_field (op0, bitsize, bitpos,
5356 unsignedp, target, ext_mode, ext_mode,
5357 alignment,
5358 int_size_in_bytes (TREE_TYPE (tem)));
5359 if (mode == BLKmode)
5360 {
5361 rtx new = assign_stack_temp (ext_mode,
5362 bitsize / BITS_PER_UNIT, 0);
5363
5364 emit_move_insn (new, op0);
5365 op0 = copy_rtx (new);
5366 PUT_MODE (op0, BLKmode);
5367 MEM_IN_STRUCT_P (op0) = 1;
5368 }
5369
5370 return op0;
5371 }
5372
5373 /* If the result is BLKmode, use that to access the object
5374 now as well. */
5375 if (mode == BLKmode)
5376 mode1 = BLKmode;
5377
5378 /* Get a reference to just this component. */
5379 if (modifier == EXPAND_CONST_ADDRESS
5380 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5381 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5382 (bitpos / BITS_PER_UNIT)));
5383 else
5384 op0 = change_address (op0, mode1,
5385 plus_constant (XEXP (op0, 0),
5386 (bitpos / BITS_PER_UNIT)));
5387 if (GET_CODE (XEXP (op0, 0)) == REG)
5388 mark_reg_pointer (XEXP (op0, 0), alignment);
5389
5390 MEM_IN_STRUCT_P (op0) = 1;
5391 MEM_VOLATILE_P (op0) |= volatilep;
5392 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5393 return op0;
5394 if (target == 0)
5395 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5396 convert_move (target, op0, unsignedp);
5397 return target;
5398 }
5399
5400 case OFFSET_REF:
5401 {
5402 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
5403 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
5404 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
5405 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
5406 MEM_IN_STRUCT_P (temp) = 1;
5407 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
5408 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
5409 a location is accessed through a pointer to const does not mean
5410 that the value there can never change. */
5411 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
5412 #endif
5413 return temp;
5414 }
5415
5416 /* Intended for a reference to a buffer of a file-object in Pascal.
5417 But it's not certain that a special tree code will really be
5418 necessary for these. INDIRECT_REF might work for them. */
5419 case BUFFER_REF:
5420 abort ();
5421
5422 case IN_EXPR:
5423 {
5424 /* Pascal set IN expression.
5425
5426 Algorithm:
5427 rlo = set_low - (set_low%bits_per_word);
5428 the_word = set [ (index - rlo)/bits_per_word ];
5429 bit_index = index % bits_per_word;
5430 bitmask = 1 << bit_index;
5431 return !!(the_word & bitmask); */
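/* Worked example (editor's note, illustrative only): with
   bits_per_word == 8, a set declared over [10..30], and index == 17:

       rlo       = 10 - (10 % 8)    = 8;
       the_word  = set [(17 - 8)/8] = set [1];
       bit_index = 17 % 8           = 1;
       bitmask   = 1 << 1           = 2;

   i.e. the answer is bit 1 of the second byte of the set.  */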
5432
5433 tree set = TREE_OPERAND (exp, 0);
5434 tree index = TREE_OPERAND (exp, 1);
5435 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5436 tree set_type = TREE_TYPE (set);
5437 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5438 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5439 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5440 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5441 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5442 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5443 rtx setaddr = XEXP (setval, 0);
5444 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5445 rtx rlow;
5446 rtx diff, quo, rem, addr, bit, result;
5447
5448 preexpand_calls (exp);
5449
5450 /* If domain is empty, answer is no. Likewise if index is constant
5451 and out of bounds. */
5452 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5453 && TREE_CODE (set_low_bound) == INTEGER_CST
5454 && tree_int_cst_lt (set_high_bound, set_low_bound)
5455 || (TREE_CODE (index) == INTEGER_CST
5456 && TREE_CODE (set_low_bound) == INTEGER_CST
5457 && tree_int_cst_lt (index, set_low_bound))
5458 || (TREE_CODE (set_high_bound) == INTEGER_CST
5459 && TREE_CODE (index) == INTEGER_CST
5460 && tree_int_cst_lt (set_high_bound, index))))
5461 return const0_rtx;
5462
5463 if (target == 0)
5464 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5465
5466 /* If we get here, we have to generate the code for both cases
5467 (in range and out of range). */
5468
5469 op0 = gen_label_rtx ();
5470 op1 = gen_label_rtx ();
5471
5472 if (! (GET_CODE (index_val) == CONST_INT
5473 && GET_CODE (lo_r) == CONST_INT))
5474 {
5475 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5476 GET_MODE (index_val), iunsignedp, 0);
5477 emit_jump_insn (gen_blt (op1));
5478 }
5479
5480 if (! (GET_CODE (index_val) == CONST_INT
5481 && GET_CODE (hi_r) == CONST_INT))
5482 {
5483 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5484 GET_MODE (index_val), iunsignedp, 0);
5485 emit_jump_insn (gen_bgt (op1));
5486 }
5487
5488 /* Calculate the element number of bit zero in the first word
5489 of the set: LO_R rounded down to a multiple of BITS_PER_UNIT. */
5490 if (GET_CODE (lo_r) == CONST_INT)
5491 rlow = GEN_INT (INTVAL (lo_r)
5492 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
5493 else
5494 rlow = expand_binop (index_mode, and_optab, lo_r,
5495 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
5496 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5497
5498 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5499 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5500
5501 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5502 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5503 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5504 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5505
5506 addr = memory_address (byte_mode,
5507 expand_binop (index_mode, add_optab, quo,
5508 setaddr, NULL_RTX, iunsignedp,
5509 OPTAB_LIB_WIDEN));
5510
5511 /* Extract the bit we want to examine. */
5512 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5513 gen_rtx (MEM, byte_mode, addr),
5514 make_tree (TREE_TYPE (index), rem),
5515 NULL_RTX, 1);
5516 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5517 GET_MODE (target) == byte_mode ? target : 0,
5518 1, OPTAB_LIB_WIDEN);
5519
5520 if (result != target)
5521 convert_move (target, result, 1);
5522
5523 /* Output the code to handle the out-of-range case. */
5524 emit_jump (op0);
5525 emit_label (op1);
5526 emit_move_insn (target, const0_rtx);
5527 emit_label (op0);
5528 return target;
5529 }
5530
5531 case WITH_CLEANUP_EXPR:
5532 if (RTL_EXPR_RTL (exp) == 0)
5533 {
5534 RTL_EXPR_RTL (exp)
5535 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5536 cleanups_this_call
5537 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5538 /* That's it for this cleanup. */
5539 TREE_OPERAND (exp, 2) = 0;
5540 (*interim_eh_hook) (NULL_TREE);
5541 }
5542 return RTL_EXPR_RTL (exp);
5543
5544 case CLEANUP_POINT_EXPR:
5545 {
5546 extern int temp_slot_level;
5547 tree old_cleanups = cleanups_this_call;
5548 int old_temp_level = target_temp_slot_level;
5549 push_temp_slots ();
5550 target_temp_slot_level = temp_slot_level;
5551 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5552 /* If we're going to use this value, load it up now. */
5553 if (! ignore)
5554 op0 = force_not_mem (op0);
5555 expand_cleanups_to (old_cleanups);
5556 preserve_temp_slots (op0);
5557 free_temp_slots ();
5558 pop_temp_slots ();
5559 target_temp_slot_level = old_temp_level;
5560 }
5561 return op0;
5562
5563 case CALL_EXPR:
5564 /* Check for a built-in function. */
5565 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5566 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5567 == FUNCTION_DECL)
5568 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5569 return expand_builtin (exp, target, subtarget, tmode, ignore);
5570
5571 /* If this call was expanded already by preexpand_calls,
5572 just return the result we got. */
5573 if (CALL_EXPR_RTL (exp) != 0)
5574 return CALL_EXPR_RTL (exp);
5575
5576 return expand_call (exp, target, ignore);
5577
5578 case NON_LVALUE_EXPR:
5579 case NOP_EXPR:
5580 case CONVERT_EXPR:
5581 case REFERENCE_EXPR:
5582 if (TREE_CODE (type) == UNION_TYPE)
5583 {
5584 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5585 if (target == 0)
5586 {
5587 if (mode != BLKmode)
5588 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5589 else
5590 target = assign_temp (type, 0, 1, 1);
5591 }
5592
5593 if (GET_CODE (target) == MEM)
5594 /* Store data into beginning of memory target. */
5595 store_expr (TREE_OPERAND (exp, 0),
5596 change_address (target, TYPE_MODE (valtype), 0), 0);
5597
5598 else if (GET_CODE (target) == REG)
5599 /* Store this field into a union of the proper type. */
5600 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5601 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5602 VOIDmode, 0, 1,
5603 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5604 else
5605 abort ();
5606
5607 /* Return the entire union. */
5608 return target;
5609 }
5610
5611 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5612 {
5613 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5614 modifier);
5615
5616 /* If the signedness of the conversion differs and OP0 is
5617 a promoted SUBREG, clear that indication since we now
5618 have to do the proper extension. */
5619 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5620 && GET_CODE (op0) == SUBREG)
5621 SUBREG_PROMOTED_VAR_P (op0) = 0;
5622
5623 return op0;
5624 }
5625
5626 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5627 if (GET_MODE (op0) == mode)
5628 return op0;
5629
5630 /* If OP0 is a constant, just convert it into the proper mode. */
5631 if (CONSTANT_P (op0))
5632 return
5633 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5634 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5635
5636 if (modifier == EXPAND_INITIALIZER)
5637 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5638
5639 if (target == 0)
5640 return
5641 convert_to_mode (mode, op0,
5642 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5643 else
5644 convert_move (target, op0,
5645 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5646 return target;
5647
5648 case PLUS_EXPR:
5649 /* We come here from MINUS_EXPR when the second operand is a constant. */
5650 plus_expr:
5651 this_optab = add_optab;
5652
5653 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5654 something else, make sure we add the register to the constant and
5655 then to the other thing. This case can occur during strength
5656 reduction and doing it this way will produce better code if the
5657 frame pointer or argument pointer is eliminated.
5658
5659 fold-const.c will ensure that the constant is always in the inner
5660 PLUS_EXPR, so the only case we need to do anything about is if
5661 sp, ap, or fp is our second argument, in which case we must swap
5662 the innermost first argument and our second argument. */
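/* Illustrative sketch (editor's note): strength reduction can hand us

       (x + 4) + fp        i.e.  PLUS_EXPR (PLUS_EXPR x 4) fp

   and since fold-const.c keeps the constant in the inner PLUS_EXPR,
   the swap below turns this into

       (fp + 4) + x

   so that fp + 4 can collapse to a single address once the frame
   pointer is eliminated.  */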
5663
5664 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5665 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5666 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5667 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5668 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5669 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5670 {
5671 tree t = TREE_OPERAND (exp, 1);
5672
5673 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5674 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5675 }
5676
5677 /* If the result is to be ptr_mode and we are adding an integer to
5678 something, we might be forming a constant. So try to use
5679 plus_constant. If it produces a sum and we can't accept it,
5680 use force_operand. This allows P = &ARR[const] to generate
5681 efficient code on machines where a SYMBOL_REF is not a valid
5682 address.
5683
5684 If this is an EXPAND_SUM call, always return the sum. */
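/* Illustrative sketch (editor's note): for  p = &arr[10]  with 4-byte
   elements the address is  arr + 40,  and plus_constant can fold it to

       (plus (symbol_ref "arr") (const_int 40))

   in one rtx, instead of first forcing the SYMBOL_REF into a register
   on machines where a bare SYMBOL_REF is not a valid address.  */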
5685 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5686 || mode == ptr_mode)
5687 {
5688 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5689 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5690 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5691 {
5692 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5693 EXPAND_SUM);
5694 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5695 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5696 op1 = force_operand (op1, target);
5697 return op1;
5698 }
5699
5700 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5701 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5702 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5703 {
5704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5705 EXPAND_SUM);
5706 if (! CONSTANT_P (op0))
5707 {
5708 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5709 VOIDmode, modifier);
5710 /* Don't go to both_summands if modifier
5711 says it's not right to return a PLUS. */
5712 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5713 goto binop2;
5714 goto both_summands;
5715 }
5716 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5717 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5718 op0 = force_operand (op0, target);
5719 return op0;
5720 }
5721 }
5722
5723 /* No sense saving up arithmetic to be done
5724 if it's all in the wrong mode to form part of an address.
5725 And force_operand won't know whether to sign-extend or
5726 zero-extend. */
5727 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5728 || mode != ptr_mode)
5729 goto binop;
5730
5731 preexpand_calls (exp);
5732 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5733 subtarget = 0;
5734
5735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5736 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5737
5738 both_summands:
5739 /* Make sure any term that's a sum with a constant comes last. */
5740 if (GET_CODE (op0) == PLUS
5741 && CONSTANT_P (XEXP (op0, 1)))
5742 {
5743 temp = op0;
5744 op0 = op1;
5745 op1 = temp;
5746 }
5747 /* If adding to a sum including a constant,
5748 associate it to put the constant outside. */
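/* Illustrative sketch (editor's note): with OP0 = (reg A) and
   OP1 = (plus (reg B) (const_int 8)), the code below first combines
   the non-constant parts and then re-attaches the constant, giving

       (plus (plus (reg A) (reg B)) (const_int 8))

   so the constant stays outermost where address folding expects it.  */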
5749 if (GET_CODE (op1) == PLUS
5750 && CONSTANT_P (XEXP (op1, 1)))
5751 {
5752 rtx constant_term = const0_rtx;
5753
5754 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5755 if (temp != 0)
5756 op0 = temp;
5757 /* Ensure that MULT comes first if there is one. */
5758 else if (GET_CODE (op0) == MULT)
5759 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5760 else
5761 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5762
5763 /* Let's also eliminate constants from op0 if possible. */
5764 op0 = eliminate_constant_term (op0, &constant_term);
5765
5766 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5767 their sum should be a constant. Form it into OP1, since the
5768 result we want will then be OP0 + OP1. */
5769
5770 temp = simplify_binary_operation (PLUS, mode, constant_term,
5771 XEXP (op1, 1));
5772 if (temp != 0)
5773 op1 = temp;
5774 else
5775 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5776 }
5777
5778 /* Put a constant term last and put a multiplication first. */
5779 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5780 temp = op1, op1 = op0, op0 = temp;
5781
5782 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5783 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5784
5785 case MINUS_EXPR:
5786 /* For initializers, we are allowed to return a MINUS of two
5787 symbolic constants. Here we handle all cases when both operands
5788 are constant. */
5791 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5792 && really_constant_p (TREE_OPERAND (exp, 0))
5793 && really_constant_p (TREE_OPERAND (exp, 1)))
5794 {
5795 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5796 VOIDmode, modifier);
5797 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5798 VOIDmode, modifier);
5799
5800 /* If the last operand is a CONST_INT, use plus_constant of
5801 the negated constant. Else make the MINUS. */
5802 if (GET_CODE (op1) == CONST_INT)
5803 return plus_constant (op0, - INTVAL (op1));
5804 else
5805 return gen_rtx (MINUS, mode, op0, op1);
5806 }
5807 /* Convert A - const to A + (-const). */
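/* Illustrative sketch (editor's note):  x - 5  becomes  x + (-5)  and
   falls into the PLUS_EXPR code above.  When the constant cannot be
   negated in TYPE, e.g.  u - 1  with unsigned u, the negation is done
   in the corresponding signed type and the sum converted back.  */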
5808 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5809 {
5810 tree negated = fold (build1 (NEGATE_EXPR, type,
5811 TREE_OPERAND (exp, 1)));
5812
5813 /* Deal with the case where we can't negate the constant
5814 in TYPE. */
5815 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5816 {
5817 tree newtype = signed_type (type);
5818 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5819 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5820 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5821
5822 if (! TREE_OVERFLOW (newneg))
5823 return expand_expr (convert (type,
5824 build (PLUS_EXPR, newtype,
5825 newop0, newneg)),
5826 target, tmode, modifier);
5827 }
5828 else
5829 {
5830 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5831 goto plus_expr;
5832 }
5833 }
5834 this_optab = sub_optab;
5835 goto binop;
5836
5837 case MULT_EXPR:
5838 preexpand_calls (exp);
5839 /* If first operand is constant, swap them.
5840 Thus the following special case checks need only
5841 check the second operand. */
5842 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5843 {
5844 register tree t1 = TREE_OPERAND (exp, 0);
5845 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5846 TREE_OPERAND (exp, 1) = t1;
5847 }
5848
5849 /* Attempt to return something suitable for generating an
5850 indexed address, for machines that support that. */
5851
5852 if (modifier == EXPAND_SUM && mode == ptr_mode
5853 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5854 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5855 {
5856 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5857
5858 /* Apply distributive law if OP0 is x+c. */
5859 if (GET_CODE (op0) == PLUS
5860 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5861 return gen_rtx (PLUS, mode,
5862 gen_rtx (MULT, mode, XEXP (op0, 0),
5863 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5864 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5865 * INTVAL (XEXP (op0, 1))));
5866
5867 if (GET_CODE (op0) != REG)
5868 op0 = force_operand (op0, NULL_RTX);
5869 if (GET_CODE (op0) != REG)
5870 op0 = copy_to_mode_reg (mode, op0);
5871
5872 return gen_rtx (MULT, mode, op0,
5873 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5874 }
5875
5876 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5877 subtarget = 0;
5878
5879 /* Check for multiplying things that have been extended
5880 from a narrower type. If this machine supports multiplying
5881 in that narrower type with a result in the desired type,
5882 do it that way, and avoid the explicit type-conversion. */
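/* Illustrative sketch (editor's note): on a machine with a widening
   16x16 -> 32 multiply pattern,

       short a, b;  int p = (int) a * (int) b;

   has both operands as NOP_EXPRs around HImode values, so we can emit
   the widening multiply directly instead of extending each operand to
   SImode and doing a full 32x32 multiply.  */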
5883 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5884 && TREE_CODE (type) == INTEGER_TYPE
5885 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5886 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5887 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5888 && int_fits_type_p (TREE_OPERAND (exp, 1),
5889 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5890 /* Don't use a widening multiply if a shift will do. */
5891 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5892 > HOST_BITS_PER_WIDE_INT)
5893 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5894 ||
5895 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5896 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5897 ==
5898 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5899 /* If both operands are extended, they must either both
5900 be zero-extended or both be sign-extended. */
5901 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5902 ==
5903 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5904 {
5905 enum machine_mode innermode
5906 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5907 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5908 ? smul_widen_optab : umul_widen_optab);
5909 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5910 ? umul_widen_optab : smul_widen_optab);
5911 if (mode == GET_MODE_WIDER_MODE (innermode))
5912 {
5913 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5914 {
5915 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5916 NULL_RTX, VOIDmode, 0);
5917 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5918 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5919 VOIDmode, 0);
5920 else
5921 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5922 NULL_RTX, VOIDmode, 0);
5923 goto binop2;
5924 }
5925 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5926 && innermode == word_mode)
5927 {
5928 rtx htem;
5929 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5930 NULL_RTX, VOIDmode, 0);
5931 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5932 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5933 VOIDmode, 0);
5934 else
5935 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5936 NULL_RTX, VOIDmode, 0);
5937 temp = expand_binop (mode, other_optab, op0, op1, target,
5938 unsignedp, OPTAB_LIB_WIDEN);
5939 htem = expand_mult_highpart_adjust (innermode,
5940 gen_highpart (innermode, temp),
5941 op0, op1,
5942 gen_highpart (innermode, temp),
5943 unsignedp);
5944 emit_move_insn (gen_highpart (innermode, temp), htem);
5945 return temp;
5946 }
5947 }
5948 }
5949 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5950 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5951 return expand_mult (mode, op0, op1, target, unsignedp);
5952
5953 case TRUNC_DIV_EXPR:
5954 case FLOOR_DIV_EXPR:
5955 case CEIL_DIV_EXPR:
5956 case ROUND_DIV_EXPR:
5957 case EXACT_DIV_EXPR:
5958 preexpand_calls (exp);
5959 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5960 subtarget = 0;
5961 /* Possible optimization: compute the dividend with EXPAND_SUM;
5962 then, if the divisor is constant, we can optimize the case
5963 where some terms of the dividend have coefficients divisible by it. */
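/* Illustrative sketch of that unimplemented optimization (editor's
   note): for  (4*i + 8) / 4  an EXPAND_SUM dividend would expose the
   terms, letting the quotient be formed as  i + 2  whenever every
   coefficient is divisible by the constant divisor.  */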
5964 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5966 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5967
5968 case RDIV_EXPR:
5969 this_optab = flodiv_optab;
5970 goto binop;
5971
5972 case TRUNC_MOD_EXPR:
5973 case FLOOR_MOD_EXPR:
5974 case CEIL_MOD_EXPR:
5975 case ROUND_MOD_EXPR:
5976 preexpand_calls (exp);
5977 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5978 subtarget = 0;
5979 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5980 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5981 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5982
5983 case FIX_ROUND_EXPR:
5984 case FIX_FLOOR_EXPR:
5985 case FIX_CEIL_EXPR:
5986 abort (); /* Not used for C. */
5987
5988 case FIX_TRUNC_EXPR:
5989 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5990 if (target == 0)
5991 target = gen_reg_rtx (mode);
5992 expand_fix (target, op0, unsignedp);
5993 return target;
5994
5995 case FLOAT_EXPR:
5996 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5997 if (target == 0)
5998 target = gen_reg_rtx (mode);
5999 /* expand_float can't figure out what to do if FROM has VOIDmode.
6000 So give it the correct mode. With -O, cse will optimize this. */
6001 if (GET_MODE (op0) == VOIDmode)
6002 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6003 op0);
6004 expand_float (target, op0,
6005 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6006 return target;
6007
6008 case NEGATE_EXPR:
6009 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6010 temp = expand_unop (mode, neg_optab, op0, target, 0);
6011 if (temp == 0)
6012 abort ();
6013 return temp;
6014
6015 case ABS_EXPR:
6016 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6017
6018 /* Handle complex values specially. */
6019 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6020 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6021 return expand_complex_abs (mode, op0, target, unsignedp);
6022
6023 /* Unsigned abs is simply the operand. Testing here means we don't
6024 risk generating incorrect code below. */
6025 if (TREE_UNSIGNED (type))
6026 return op0;
6027
6028 return expand_abs (mode, op0, target, unsignedp,
6029 safe_from_p (target, TREE_OPERAND (exp, 0)));
6030
6031 case MAX_EXPR:
6032 case MIN_EXPR:
6033 target = original_target;
6034 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6035 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6036 || GET_MODE (target) != mode
6037 || (GET_CODE (target) == REG
6038 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6039 target = gen_reg_rtx (mode);
6040 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6041 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6042
6043 /* First try to do it with a special MIN or MAX instruction.
6044 If that does not win, use a conditional jump to select the proper
6045 value. */
6046 this_optab = (TREE_UNSIGNED (type)
6047 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6048 : (code == MIN_EXPR ? smin_optab : smax_optab));
6049
6050 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6051 OPTAB_WIDEN);
6052 if (temp != 0)
6053 return temp;
6054
6055 /* At this point, a MEM target is no longer useful; we will get better
6056 code without it. */
6057
6058 if (GET_CODE (target) == MEM)
6059 target = gen_reg_rtx (mode);
6060
6061 if (target != op0)
6062 emit_move_insn (target, op0);
6063
6064 op0 = gen_label_rtx ();
6065
6066 /* If this mode is an integer too wide to compare properly,
6067 compare word by word. Rely on cse to optimize constant cases. */
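/* Illustrative sketch (editor's note): for a two-word MAX this emits,
   roughly,

       if (hi (target) > hi (op1)) goto done;
       if (hi (target) < hi (op1)) goto take_op1;
       if (lo (target) >= lo (op1)) goto done;     low words: unsigned
     take_op1:
       target = op1;
     done:

   where the "done" label here is OP0.  */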
6068 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6069 {
6070 if (code == MAX_EXPR)
6071 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6072 target, op1, NULL_RTX, op0);
6073 else
6074 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6075 op1, target, NULL_RTX, op0);
6076 emit_move_insn (target, op1);
6077 }
6078 else
6079 {
6080 if (code == MAX_EXPR)
6081 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6082 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6083 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6084 else
6085 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6086 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6087 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6088 if (temp == const0_rtx)
6089 emit_move_insn (target, op1);
6090 else if (temp != const_true_rtx)
6091 {
6092 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6093 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6094 else
6095 abort ();
6096 emit_move_insn (target, op1);
6097 }
6098 }
6099 emit_label (op0);
6100 return target;
6101
6102 case BIT_NOT_EXPR:
6103 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6104 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6105 if (temp == 0)
6106 abort ();
6107 return temp;
6108
6109 case FFS_EXPR:
6110 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6111 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6112 if (temp == 0)
6113 abort ();
6114 return temp;
6115
6116 /* ??? Can optimize bitwise operations with one arg constant.
6117 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6118 and (a bitwise1 b) bitwise2 b (etc.),
6119 but that is probably not worthwhile. */
6120
6121 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6122 boolean values when we want in all cases to compute both of them. In
6123 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6124 as actual zero-or-1 values and then bitwise anding. In cases where
6125 there cannot be any side effects, better code would be made by
6126 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6127 how to recognize those cases. */
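/* Illustrative sketch (editor's note): for A and B of boolean type,
   TRUTH_AND_EXPR evaluates both to 0 or 1 and bitwise-ANDs them,
   whereas TRUTH_ANDIF_EXPR (C's &&) branches around B entirely when
   A is zero.  Treating the former like the latter is only safe when
   B has no side effects.  */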
6128
6129 case TRUTH_AND_EXPR:
6130 case BIT_AND_EXPR:
6131 this_optab = and_optab;
6132 goto binop;
6133
6134 case TRUTH_OR_EXPR:
6135 case BIT_IOR_EXPR:
6136 this_optab = ior_optab;
6137 goto binop;
6138
6139 case TRUTH_XOR_EXPR:
6140 case BIT_XOR_EXPR:
6141 this_optab = xor_optab;
6142 goto binop;
6143
6144 case LSHIFT_EXPR:
6145 case RSHIFT_EXPR:
6146 case LROTATE_EXPR:
6147 case RROTATE_EXPR:
6148 preexpand_calls (exp);
6149 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6150 subtarget = 0;
6151 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6152 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6153 unsignedp);
6154
6155 /* Could determine the answer when only additive constants differ. Also,
6156 the addition of one can be handled by changing the condition. */
6157 case LT_EXPR:
6158 case LE_EXPR:
6159 case GT_EXPR:
6160 case GE_EXPR:
6161 case EQ_EXPR:
6162 case NE_EXPR:
6163 preexpand_calls (exp);
6164 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6165 if (temp != 0)
6166 return temp;
6167
6168 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6169 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6170 && original_target
6171 && GET_CODE (original_target) == REG
6172 && (GET_MODE (original_target)
6173 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6174 {
6175 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6176 VOIDmode, 0);
6177
6178 if (temp != original_target)
6179 temp = copy_to_reg (temp);
6180
6181 op1 = gen_label_rtx ();
6182 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6183 GET_MODE (temp), unsignedp, 0);
6184 emit_jump_insn (gen_beq (op1));
6185 emit_move_insn (temp, const1_rtx);
6186 emit_label (op1);
6187 return temp;
6188 }
6189
6190 /* If no set-flag instruction, must generate a conditional
6191 store into a temporary variable. Drop through
6192 and handle this like && and ||. */
6193
6194 case TRUTH_ANDIF_EXPR:
6195 case TRUTH_ORIF_EXPR:
6196 if (! ignore
6197 && (target == 0 || ! safe_from_p (target, exp)
6198 /* Make sure we don't have a hard reg (such as function's return
6199 value) live across basic blocks, if not optimizing. */
6200 || (!optimize && GET_CODE (target) == REG
6201 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6202 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6203
6204 if (target)
6205 emit_clr_insn (target);
6206
6207 op1 = gen_label_rtx ();
6208 jumpifnot (exp, op1);
6209
6210 if (target)
6211 emit_0_to_1_insn (target);
6212
6213 emit_label (op1);
6214 return ignore ? const0_rtx : target;
6215
6216 case TRUTH_NOT_EXPR:
6217 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6218 /* The parser is careful to generate TRUTH_NOT_EXPR
6219 only with operands that are always zero or one. */
6220 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6221 target, 1, OPTAB_LIB_WIDEN);
6222 if (temp == 0)
6223 abort ();
6224 return temp;
6225
6226 case COMPOUND_EXPR:
6227 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6228 emit_queue ();
6229 return expand_expr (TREE_OPERAND (exp, 1),
6230 (ignore ? const0_rtx : target),
6231 VOIDmode, 0);
6232
6233 case COND_EXPR:
6234 {
6235 rtx flag = NULL_RTX;
6236 tree left_cleanups = NULL_TREE;
6237 tree right_cleanups = NULL_TREE;
6238
6239 /* Used to save a pointer to the place to put the setting of
6240 the flag that indicates if this side of the conditional was
6241 taken. We backpatch the code, if we find out later that we
6242 have any conditional cleanups that need to be performed. */
6243 rtx dest_right_flag = NULL_RTX;
6244 rtx dest_left_flag = NULL_RTX;
6245
6246 /* Note that COND_EXPRs whose type is a structure or union
6247 are required to be constructed to contain assignments of
6248 a temporary variable, so that we can evaluate them here
6249 for side effect only. If type is void, we must do likewise. */
6250
6251 /* If an arm of the branch requires a cleanup,
6252 only that cleanup is performed. */
6253
6254 tree singleton = 0;
6255 tree binary_op = 0, unary_op = 0;
6256 tree old_cleanups = cleanups_this_call;
6257
6258 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6259 convert it to our mode, if necessary. */
6260 if (integer_onep (TREE_OPERAND (exp, 1))
6261 && integer_zerop (TREE_OPERAND (exp, 2))
6262 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6263 {
6264 if (ignore)
6265 {
6266 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6267 modifier);
6268 return const0_rtx;
6269 }
6270
6271 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6272 if (GET_MODE (op0) == mode)
6273 return op0;
6274
6275 if (target == 0)
6276 target = gen_reg_rtx (mode);
6277 convert_move (target, op0, unsignedp);
6278 return target;
6279 }
6280
6281 /* If we are not to produce a result, we have no target. Otherwise,
6282 if a target was specified use it; it will not be used as an
6283 intermediate target unless it is safe. If no target, use a
6284 temporary. */
6285
6286 if (ignore)
6287 temp = 0;
6288 else if (original_target
6289 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6290 && GET_MODE (original_target) == mode
6291 && ! (GET_CODE (original_target) == MEM
6292 && MEM_VOLATILE_P (original_target)))
6293 temp = original_target;
6294 else
6295 temp = assign_temp (type, 0, 0, 1);
6296
6297 /* Check for X ? A + B : A. If we have this, we can copy
6298 A to the output and conditionally add B. Similarly for unary
6299 operations. Don't do this if X has side-effects because
6300 those side effects might affect A or B and the "?" operation is
6301 a sequence point in ANSI. (We test for side effects later.) */
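/* Illustrative sketch (editor's note):  x ? a + 1 : a  can be
   compiled branch-free as

       a + (x != 0)

   since the arms differ only in the added term; do_store_flag below
   yields the (x != 0) value directly when the machine has a set-flag
   instruction.  */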
6302
6303 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6304 && operand_equal_p (TREE_OPERAND (exp, 2),
6305 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6306 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6307 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6308 && operand_equal_p (TREE_OPERAND (exp, 1),
6309 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6310 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6311 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6312 && operand_equal_p (TREE_OPERAND (exp, 2),
6313 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6314 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6315 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6316 && operand_equal_p (TREE_OPERAND (exp, 1),
6317 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6318 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6319
6320 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6321 operation, do this as A + (X != 0). Similarly for other simple
6322 binary operators. */
6323 if (temp && singleton && binary_op
6324 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6325 && (TREE_CODE (binary_op) == PLUS_EXPR
6326 || TREE_CODE (binary_op) == MINUS_EXPR
6327 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6328 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6329 && integer_onep (TREE_OPERAND (binary_op, 1))
6330 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6331 {
6332 rtx result;
6333 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6334 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6335 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6336 : xor_optab);
6337
6338 /* If we had X ? A : A + 1, do this as A + (X == 0).
6339
6340 We have to invert the truth value here and then put it
6341 back later if do_store_flag fails. We cannot simply copy
6342 TREE_OPERAND (exp, 0) to another variable and modify that
6343 because invert_truthvalue can modify the tree pointed to
6344 by its argument. */
6345 if (singleton == TREE_OPERAND (exp, 1))
6346 TREE_OPERAND (exp, 0)
6347 = invert_truthvalue (TREE_OPERAND (exp, 0));
6348
6349 result = do_store_flag (TREE_OPERAND (exp, 0),
6350 (safe_from_p (temp, singleton)
6351 ? temp : NULL_RTX),
6352 mode, BRANCH_COST <= 1);
6353
6354 if (result)
6355 {
6356 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6357 return expand_binop (mode, boptab, op1, result, temp,
6358 unsignedp, OPTAB_LIB_WIDEN);
6359 }
6360 else if (singleton == TREE_OPERAND (exp, 1))
6361 TREE_OPERAND (exp, 0)
6362 = invert_truthvalue (TREE_OPERAND (exp, 0));
6363 }
6364
6365 do_pending_stack_adjust ();
6366 NO_DEFER_POP;
6367 op0 = gen_label_rtx ();
6368
6369 flag = gen_reg_rtx (word_mode);
6370 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6371 {
6372 if (temp != 0)
6373 {
6374 /* If the target conflicts with the other operand of the
6375 binary op, we can't use it. Also, we can't use the target
6376 if it is a hard register, because evaluating the condition
6377 might clobber it. */
6378 if ((binary_op
6379 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6380 || (GET_CODE (temp) == REG
6381 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6382 temp = gen_reg_rtx (mode);
6383 store_expr (singleton, temp, 0);
6384 }
6385 else
6386 expand_expr (singleton,
6387 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6388 dest_left_flag = get_last_insn ();
6389 if (singleton == TREE_OPERAND (exp, 1))
6390 jumpif (TREE_OPERAND (exp, 0), op0);
6391 else
6392 jumpifnot (TREE_OPERAND (exp, 0), op0);
6393
6394 /* Allows cleanups up to here. */
6395 old_cleanups = cleanups_this_call;
6396 if (binary_op && temp == 0)
6397 /* Just touch the other operand. */
6398 expand_expr (TREE_OPERAND (binary_op, 1),
6399 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6400 else if (binary_op)
6401 store_expr (build (TREE_CODE (binary_op), type,
6402 make_tree (type, temp),
6403 TREE_OPERAND (binary_op, 1)),
6404 temp, 0);
6405 else
6406 store_expr (build1 (TREE_CODE (unary_op), type,
6407 make_tree (type, temp)),
6408 temp, 0);
6409 op1 = op0;
6410 dest_right_flag = get_last_insn ();
6411 }
6412 #if 0
6413 /* This is now done in jump.c and is better done there because it
6414 produces shorter register lifetimes. */
6415
6416 /* Check for both possibilities either constants or variables
6417 in registers (but not the same as the target!). If so, can
6418 save branches by assigning one, branching, and assigning the
6419 other. */
6420 else if (temp && GET_MODE (temp) != BLKmode
6421 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6422 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6423 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6424 && DECL_RTL (TREE_OPERAND (exp, 1))
6425 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6426 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6427 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6428 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6429 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6430 && DECL_RTL (TREE_OPERAND (exp, 2))
6431 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6432 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6433 {
6434 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6435 temp = gen_reg_rtx (mode);
6436 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6437 dest_left_flag = get_last_insn ();
6438 jumpifnot (TREE_OPERAND (exp, 0), op0);
6439
6440 /* Allows cleanups up to here. */
6441 old_cleanups = cleanups_this_call;
6442 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6443 op1 = op0;
6444 dest_right_flag = get_last_insn ();
6445 }
6446 #endif
6447 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6448 comparison operator. If we have one of these cases, set the
6449 output to A, branch on A (cse will merge these two references),
6450 then set the output to FOO. */
6451 else if (temp
6452 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6453 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6454 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6455 TREE_OPERAND (exp, 1), 0)
6456 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6457 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6458 {
6459 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6460 temp = gen_reg_rtx (mode);
6461 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6462 dest_left_flag = get_last_insn ();
6463 jumpif (TREE_OPERAND (exp, 0), op0);
6464
6465 /* Allows cleanups up to here. */
6466 old_cleanups = cleanups_this_call;
6467 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6468 op1 = op0;
6469 dest_right_flag = get_last_insn ();
6470 }
6471 else if (temp
6472 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6473 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6474 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6475 TREE_OPERAND (exp, 2), 0)
6476 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6477 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6478 {
6479 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6480 temp = gen_reg_rtx (mode);
6481 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6482 dest_left_flag = get_last_insn ();
6483 jumpifnot (TREE_OPERAND (exp, 0), op0);
6484
6485 /* Allows cleanups up to here. */
6486 old_cleanups = cleanups_this_call;
6487 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6488 op1 = op0;
6489 dest_right_flag = get_last_insn ();
6490 }
6491 else
6492 {
6493 op1 = gen_label_rtx ();
6494 jumpifnot (TREE_OPERAND (exp, 0), op0);
6495
6496 /* Allows cleanups up to here. */
6497 old_cleanups = cleanups_this_call;
6498 if (temp != 0)
6499 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6500 else
6501 expand_expr (TREE_OPERAND (exp, 1),
6502 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6503 dest_left_flag = get_last_insn ();
6504
6505 /* Handle conditional cleanups, if any. */
6506 left_cleanups = defer_cleanups_to (old_cleanups);
6507
6508 emit_queue ();
6509 emit_jump_insn (gen_jump (op1));
6510 emit_barrier ();
6511 emit_label (op0);
6512 if (temp != 0)
6513 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6514 else
6515 expand_expr (TREE_OPERAND (exp, 2),
6516 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6517 dest_right_flag = get_last_insn ();
6518 }
6519
6520 /* Handle conditional cleanups, if any. */
6521 right_cleanups = defer_cleanups_to (old_cleanups);
6522
6523 emit_queue ();
6524 emit_label (op1);
6525 OK_DEFER_POP;
6526
6527 /* Add back in, any conditional cleanups. */
6528 if (left_cleanups || right_cleanups)
6529 {
6530 tree new_cleanups;
6531 tree cond;
6532 rtx last;
6533
6534 /* Now that we know that a flag is needed, go back and add in the
6535 setting of the flag. */
6536
6537 /* Do the left side flag. */
6538 last = get_last_insn ();
6539 /* Flag left cleanups as needed. */
6540 emit_move_insn (flag, const1_rtx);
6541 /* ??? deprecated, use sequences instead. */
6542 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6543
6544 /* Do the right side flag. */
6545 last = get_last_insn ();
6546 /* Flag right cleanups as needed. */
6547 emit_move_insn (flag, const0_rtx);
6548 /* ??? deprecated, use sequences instead. */
6549 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6550
6551 /* All cleanups must be on the function_obstack. */
6552 push_obstacks_nochange ();
6553 resume_temporary_allocation ();
6554
6555 /* Convert FLAG, which is an rtx, into a tree. */
6556 cond = make_node (RTL_EXPR);
6557 TREE_TYPE (cond) = integer_type_node;
6558 RTL_EXPR_RTL (cond) = flag;
6559 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6560 cond = save_expr (cond);
6561
6562 if (! left_cleanups)
6563 left_cleanups = integer_zero_node;
6564 if (! right_cleanups)
6565 right_cleanups = integer_zero_node;
6566 new_cleanups = build (COND_EXPR, void_type_node,
6567 truthvalue_conversion (cond),
6568 left_cleanups, right_cleanups);
6569 new_cleanups = fold (new_cleanups);
6570
6571 pop_obstacks ();
6572
6573 /* Now add in the conditionalized cleanups. */
6574 cleanups_this_call
6575 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6576 (*interim_eh_hook) (NULL_TREE);
6577 }
6578 return temp;
6579 }
6580
6581 case TARGET_EXPR:
6582 {
6583 int need_exception_region = 0;
6584 /* Something needs to be initialized, but we didn't know
6585 where that thing was when building the tree. For example,
6586 it could be the return value of a function, or a parameter
6587 to a function which is laid down on the stack, or a temporary
6588 variable which must be passed by reference.
6589
6590 We guarantee that the expression will either be constructed
6591 or copied into our original target. */
6592
6593 tree slot = TREE_OPERAND (exp, 0);
6594 tree exp1;
6595 rtx temp;
6596
6597 if (TREE_CODE (slot) != VAR_DECL)
6598 abort ();
6599
6600 if (! ignore)
6601 target = original_target;
6602
6603 if (target == 0)
6604 {
6605 if (DECL_RTL (slot) != 0)
6606 {
6607 target = DECL_RTL (slot);
6608 /* If we have already expanded the slot, don't do
6609 it again. (mrs) */
6610 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6611 return target;
6612 }
6613 else
6614 {
6615 target = assign_temp (type, 2, 1, 1);
6616 /* All temp slots at this level must not conflict. */
6617 preserve_temp_slots (target);
6618 DECL_RTL (slot) = target;
6619
6620 /* Since SLOT is not known to the called function
6621 to belong to its stack frame, we must build an explicit
6622 cleanup. This case occurs when we must build up a reference
6623 to pass as an argument. In this case,
6624 it is very likely that such a reference need not be
6625 built here. */
6626
6627 if (TREE_OPERAND (exp, 2) == 0)
6628 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6629 if (TREE_OPERAND (exp, 2))
6630 {
6631 cleanups_this_call = tree_cons (NULL_TREE,
6632 TREE_OPERAND (exp, 2),
6633 cleanups_this_call);
6634 need_exception_region = 1;
6635 }
6636 }
6637 }
6638 else
6639 {
6640 /* This case does occur, when expanding a parameter which
6641 needs to be constructed on the stack. The target
6642 is the actual stack address that we want to initialize.
6643 The function we call will perform the cleanup in this case. */
6644
6645 /* If we have already assigned it space, use that space,
6646 not the target that we were passed in, as our target
6647 parameter is only a hint. */
6648 if (DECL_RTL (slot) != 0)
6649 {
6650 target = DECL_RTL (slot);
6651 /* If we have already expanded the slot, don't do
6652 it again. (mrs) */
6653 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6654 return target;
6655 }
6656
6657 DECL_RTL (slot) = target;
6658 }
6659
6660 exp1 = TREE_OPERAND (exp, 1);
6661 /* Mark it as expanded. */
6662 TREE_OPERAND (exp, 1) = NULL_TREE;
6663
6664 temp = expand_expr (exp1, target, tmode, modifier);
6665
6666 if (need_exception_region)
6667 (*interim_eh_hook) (NULL_TREE);
6668
6669 return temp;
6670 }
6671
6672 case INIT_EXPR:
6673 {
6674 tree lhs = TREE_OPERAND (exp, 0);
6675 tree rhs = TREE_OPERAND (exp, 1);
6676 tree noncopied_parts = 0;
6677 tree lhs_type = TREE_TYPE (lhs);
6678
6679 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6680 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6681 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6682 TYPE_NONCOPIED_PARTS (lhs_type));
6683 while (noncopied_parts != 0)
6684 {
6685 expand_assignment (TREE_VALUE (noncopied_parts),
6686 TREE_PURPOSE (noncopied_parts), 0, 0);
6687 noncopied_parts = TREE_CHAIN (noncopied_parts);
6688 }
6689 return temp;
6690 }
6691
6692 case MODIFY_EXPR:
6693 {
6694 /* If lhs is complex, expand calls in rhs before computing it.
6695 That's so we don't compute a pointer and save it over a call.
6696 If lhs is simple, compute it first so we can give it as a
6697 target if the rhs is just a call. This avoids an extra temp and copy,
6698 and prevents a partial subsumption which makes bad code.
6699 Actually we could treat component_ref's of vars like vars. */
6700
6701 tree lhs = TREE_OPERAND (exp, 0);
6702 tree rhs = TREE_OPERAND (exp, 1);
6703 tree noncopied_parts = 0;
6704 tree lhs_type = TREE_TYPE (lhs);
6705
6706 temp = 0;
6707
6708 if (TREE_CODE (lhs) != VAR_DECL
6709 && TREE_CODE (lhs) != RESULT_DECL
6710 && TREE_CODE (lhs) != PARM_DECL)
6711 preexpand_calls (exp);
6712
6713 /* Check for |= or &= of a bitfield of size one into another bitfield
6714 of size 1. In this case, (unless we need the result of the
6715 assignment) we can do this more efficiently with a
6716 test followed by an assignment, if necessary.
6717
6718 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6719 things change so we do, this code should be enhanced to
6720 support it. */
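/* Illustrative sketch (editor's note): for one-bit fields,

       s.a |= t.b;     becomes     if (t.b) s.a = 1;
       s.a &= t.b;     becomes     if (! t.b) s.a = 0;

   trading the read-modify-write of the destination bitfield for a
   test and a conditional store.  */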
6721 if (ignore
6722 && TREE_CODE (lhs) == COMPONENT_REF
6723 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6724 || TREE_CODE (rhs) == BIT_AND_EXPR)
6725 && TREE_OPERAND (rhs, 0) == lhs
6726 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6727 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6728 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6729 {
6730 rtx label = gen_label_rtx ();
6731
6732 do_jump (TREE_OPERAND (rhs, 1),
6733 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6734 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6735 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6736 (TREE_CODE (rhs) == BIT_IOR_EXPR
6737 ? integer_one_node
6738 : integer_zero_node)),
6739 0, 0);
6740 do_pending_stack_adjust ();
6741 emit_label (label);
6742 return const0_rtx;
6743 }
6744
6745 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6746 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6747 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6748 TYPE_NONCOPIED_PARTS (lhs_type));
6749
6750 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6751 while (noncopied_parts != 0)
6752 {
6753 expand_assignment (TREE_PURPOSE (noncopied_parts),
6754 TREE_VALUE (noncopied_parts), 0, 0);
6755 noncopied_parts = TREE_CHAIN (noncopied_parts);
6756 }
6757 return temp;
6758 }
6759
6760 case PREINCREMENT_EXPR:
6761 case PREDECREMENT_EXPR:
6762 return expand_increment (exp, 0);
6763
6764 case POSTINCREMENT_EXPR:
6765 case POSTDECREMENT_EXPR:
6766 /* Faster to treat as pre-increment if result is not used. */
6767 return expand_increment (exp, ! ignore);
6768
6769 case ADDR_EXPR:
6770 /* If nonzero, TEMP will be set to the address of something that might
6771 be a MEM corresponding to a stack slot. */
6772 temp = 0;
6773
6774 /* Are we taking the address of a nested function? */
6775 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6776 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6777 {
6778 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6779 op0 = force_operand (op0, target);
6780 }
6781 /* If we are taking the address of something erroneous, just
6782 return a zero. */
6783 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6784 return const0_rtx;
6785 else
6786 {
6787 /* We make sure to pass const0_rtx down if we came in with
6788 ignore set, to avoid doing the cleanups twice. */
6789 op0 = expand_expr (TREE_OPERAND (exp, 0),
6790 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6791 (modifier == EXPAND_INITIALIZER
6792 ? modifier : EXPAND_CONST_ADDRESS));
6793
6794 /* If we are going to ignore the result, OP0 will have been set
6795 to const0_rtx, so just return it. Don't get confused and
6796 think we are taking the address of the constant. */
6797 if (ignore)
6798 return op0;
6799
6800 op0 = protect_from_queue (op0, 0);
6801
6802 /* We would like the object in memory. If it is a constant,
6803 we can have it be statically allocated into memory. For
6804 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6805 memory and store the value into it. */
6806
6807 if (CONSTANT_P (op0))
6808 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6809 op0);
6810 else if (GET_CODE (op0) == MEM)
6811 {
6812 mark_temp_addr_taken (op0);
6813 temp = XEXP (op0, 0);
6814 }
6815
6816 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6817 || GET_CODE (op0) == CONCAT)
6818 {
6819 /* If this object is in a register, it must not
6820 be BLKmode. */
6821 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6822 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6823
6824 mark_temp_addr_taken (memloc);
6825 emit_move_insn (memloc, op0);
6826 op0 = memloc;
6827 }
6828
6829 if (GET_CODE (op0) != MEM)
6830 abort ();
6831
6832 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6833 {
6834 temp = XEXP (op0, 0);
6835 #ifdef POINTERS_EXTEND_UNSIGNED
6836 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6837 && mode == ptr_mode)
6838 temp = convert_memory_address (ptr_mode, temp);
6839 #endif
6840 return temp;
6841 }
6842
6843 op0 = force_operand (XEXP (op0, 0), target);
6844 }
6845
6846 if (flag_force_addr && GET_CODE (op0) != REG)
6847 op0 = force_reg (Pmode, op0);
6848
6849 if (GET_CODE (op0) == REG
6850 && ! REG_USERVAR_P (op0))
6851 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
6852
6853 /* If we might have had a temp slot, add an equivalent address
6854 for it. */
6855 if (temp != 0)
6856 update_temp_slot_address (temp, op0);
6857
6858 #ifdef POINTERS_EXTEND_UNSIGNED
6859 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6860 && mode == ptr_mode)
6861 op0 = convert_memory_address (ptr_mode, op0);
6862 #endif
6863
6864 return op0;
6865
6866 case ENTRY_VALUE_EXPR:
6867 abort ();
6868
6869 /* COMPLEX type for Extended Pascal & Fortran */
6870 case COMPLEX_EXPR:
6871 {
6872 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6873 rtx insns;
6874
6875 /* Get the rtx code of the operands. */
6876 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6877 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6878
6879 if (! target)
6880 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6881
6882 start_sequence ();
6883
6884 /* Move the real (op0) and imaginary (op1) parts to their location. */
6885 emit_move_insn (gen_realpart (mode, target), op0);
6886 emit_move_insn (gen_imagpart (mode, target), op1);
6887
6888 insns = get_insns ();
6889 end_sequence ();
6890
6891 /* Complex construction should appear as a single unit. */
6892 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6893 each with a separate pseudo as destination.
6894 It's not correct for flow to treat them as a unit. */
6895 if (GET_CODE (target) != CONCAT)
6896 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6897 else
6898 emit_insns (insns);
6899
6900 return target;
6901 }
6902
6903 case REALPART_EXPR:
6904 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6905 return gen_realpart (mode, op0);
6906
6907 case IMAGPART_EXPR:
6908 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6909 return gen_imagpart (mode, op0);
6910
6911 case CONJ_EXPR:
6912 {
6913 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6914 rtx imag_t;
6915 rtx insns;
6916
6917 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6918
6919 if (! target)
6920 target = gen_reg_rtx (mode);
6921
6922 start_sequence ();
6923
6924 /* Store the realpart and the negated imagpart to target. */
6925 emit_move_insn (gen_realpart (partmode, target),
6926 gen_realpart (partmode, op0));
6927
6928 imag_t = gen_imagpart (partmode, target);
6929 temp = expand_unop (partmode, neg_optab,
6930 gen_imagpart (partmode, op0), imag_t, 0);
6931 if (temp != imag_t)
6932 emit_move_insn (imag_t, temp);
6933
6934 insns = get_insns ();
6935 end_sequence ();
6936
6937 /* Conjugate should appear as a single unit.
6938 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6939 each with a separate pseudo as destination.
6940 It's not correct for flow to treat them as a unit. */
6941 if (GET_CODE (target) != CONCAT)
6942 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6943 else
6944 emit_insns (insns);
6945
6946 return target;
6947 }
6948
6949 case ERROR_MARK:
6950 op0 = CONST0_RTX (tmode);
6951 if (op0 != 0)
6952 return op0;
6953 return const0_rtx;
6954
6955 default:
6956 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6957 }
6958
6959 /* Here to do an ordinary binary operator, generating an instruction
6960 from the optab already placed in `this_optab'. */
6961 binop:
6962 preexpand_calls (exp);
6963 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6964 subtarget = 0;
6965 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6967 binop2:
6968 temp = expand_binop (mode, this_optab, op0, op1, target,
6969 unsignedp, OPTAB_LIB_WIDEN);
6970 if (temp == 0)
6971 abort ();
6972 return temp;
6973 }
6974
6975
6976 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6977 void
6978 bc_expand_expr (exp)
6979 tree exp;
6980 {
6981 enum tree_code code;
6982 tree type, arg0;
6983 rtx r;
6984 struct binary_operator *binoptab;
6985 struct unary_operator *unoptab;
6986 struct increment_operator *incroptab;
6987 struct bc_label *lab, *lab1;
6988 enum bytecode_opcode opcode;
6989
6990
6991 code = TREE_CODE (exp);
6992
6993 switch (code)
6994 {
6995 case PARM_DECL:
6996
6997 if (DECL_RTL (exp) == 0)
6998 {
6999 error_with_decl (exp, "prior parameter's size depends on `%s'");
7000 return;
7001 }
7002
7003 bc_load_parmaddr (DECL_RTL (exp));
7004 bc_load_memory (TREE_TYPE (exp), exp);
7005
7006 return;
7007
7008 case VAR_DECL:
7009
7010 if (DECL_RTL (exp) == 0)
7011 abort ();
7012
7013 #if 0
7014 if (BYTECODE_LABEL (DECL_RTL (exp)))
7015 bc_load_externaddr (DECL_RTL (exp));
7016 else
7017 bc_load_localaddr (DECL_RTL (exp));
7018 #endif
7019 if (TREE_PUBLIC (exp))
7020 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7021 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7022 else
7023 bc_load_localaddr (DECL_RTL (exp));
7024
7025 bc_load_memory (TREE_TYPE (exp), exp);
7026 return;
7027
7028 case INTEGER_CST:
7029
7030 #ifdef DEBUG_PRINT_CODE
7031 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7032 #endif
7033 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7034 ? SImode
7035 : TYPE_MODE (TREE_TYPE (exp)))],
7036 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7037 return;
7038
7039 case REAL_CST:
7040
7041 #if 0
7042 #ifdef DEBUG_PRINT_CODE
7043 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7044 #endif
7045 /* FIX THIS: find a better way to pass real_cst's. -bson */
7046 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7047 (double) TREE_REAL_CST (exp));
7048 #else
7049 abort ();
7050 #endif
7051
7052 return;
7053
7054 case CALL_EXPR:
7055
7056 /* We build a call description vector describing the type of
7057 the return value and of the arguments; this call vector,
7058 together with a pointer to a location for the return value
7059 and the base of the argument list, is passed to the low
7060 level machine dependent call subroutine, which is responsible
7061 for putting the arguments wherever real functions expect
7062 them, as well as getting the return value back. */
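/* A sketch of the finished calldesc (traced from the code below,
   shown here for illustration only): for a call f (i, d) taking an
   int and a double, the vector holds

       nargs (2),
       type code and size of the return value,
       type code and size of I,
       type code and size of D,

   with the per-argument entries in source order thanks to the pair
   of nreverse calls below.  */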
7063 {
7064 tree calldesc = 0, arg;
7065 int nargs = 0, i;
7066 rtx retval;
7067
7068 /* Push the evaluated args on the evaluation stack in reverse
7069 order. Also make an entry for each arg in the calldesc
7070 vector while we're at it. */
7071
7072 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7073
7074 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7075 {
7076 ++nargs;
7077 bc_expand_expr (TREE_VALUE (arg));
7078
7079 calldesc = tree_cons ((tree) 0,
7080 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7081 calldesc);
7082 calldesc = tree_cons ((tree) 0,
7083 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7084 calldesc);
7085 }
7086
7087 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7088
7089 /* Allocate a location for the return value and push its
7090 address on the evaluation stack. Also make an entry
7091 at the front of the calldesc for the return value type. */
7092
7093 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7094 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7095 bc_load_localaddr (retval);
7096
7097 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7098 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7099
7100 /* Prepend the argument count. */
7101 calldesc = tree_cons ((tree) 0,
7102 build_int_2 (nargs, 0),
7103 calldesc);
7104
7105 /* Push the address of the call description vector on the stack. */
7106 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7107 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7108 build_index_type (build_int_2 (nargs * 2, 0)));
7109 r = output_constant_def (calldesc);
7110 bc_load_externaddr (r);
7111
7112 /* Push the address of the function to be called. */
7113 bc_expand_expr (TREE_OPERAND (exp, 0));
7114
7115 /* Call the function, popping its address and the calldesc vector
7116 address off the evaluation stack in the process. */
7117 bc_emit_instruction (call);
7118
7119 /* Pop the arguments off the stack. */
7120 bc_adjust_stack (nargs);
7121
7122 /* Load the return value onto the stack. */
7123 bc_load_localaddr (retval);
7124 bc_load_memory (type, TREE_OPERAND (exp, 0));
7125 }
7126 return;
7127
7128 case SAVE_EXPR:
7129
7130 if (!SAVE_EXPR_RTL (exp))
7131 {
7132 /* First time around: copy to local variable */
7133 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7134 TYPE_ALIGN (TREE_TYPE(exp)));
7135 bc_expand_expr (TREE_OPERAND (exp, 0));
7136 bc_emit_instruction (duplicate);
7137
7138 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7139 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7140 }
7141 else
7142 {
7143 /* Subsequent references: use the saved copy */
7144 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7145 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7146 }
7147 return;
7148
7149 #if 0
7150 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7151 how are they handled instead? */
7152 case LET_STMT:
7153
7154 TREE_USED (exp) = 1;
7155 bc_expand_expr (STMT_BODY (exp));
7156 return;
7157 #endif
7158
7159 case NOP_EXPR:
7160 case CONVERT_EXPR:
7161
7162 bc_expand_expr (TREE_OPERAND (exp, 0));
7163 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7164 return;
7165
7166 case MODIFY_EXPR:
7167
7168 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7169 return;
7170
7171 case ADDR_EXPR:
7172
7173 bc_expand_address (TREE_OPERAND (exp, 0));
7174 return;
7175
7176 case INDIRECT_REF:
7177
7178 bc_expand_expr (TREE_OPERAND (exp, 0));
7179 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7180 return;
7181
7182 case ARRAY_REF:
7183
7184 bc_expand_expr (bc_canonicalize_array_ref (exp));
7185 return;
7186
7187 case COMPONENT_REF:
7188
7189 bc_expand_component_address (exp);
7190
7191 /* If we have a bitfield, generate a proper load */
7192 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7193 return;
7194
7195 case COMPOUND_EXPR:
7196
7197 bc_expand_expr (TREE_OPERAND (exp, 0));
7198 bc_emit_instruction (drop);
7199 bc_expand_expr (TREE_OPERAND (exp, 1));
7200 return;
7201
7202 case COND_EXPR:
7203
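/* A sketch of the bytecode emitted for E1 ? E2 : E3:

       <E1 as truth value>  xjumpifnot L
       <E2>                 jump L1
   L:  <E3>
   L1:                                        */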
7204 bc_expand_expr (TREE_OPERAND (exp, 0));
7205 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7206 lab = bc_get_bytecode_label ();
7207 bc_emit_bytecode (xjumpifnot);
7208 bc_emit_bytecode_labelref (lab);
7209
7210 #ifdef DEBUG_PRINT_CODE
7211 fputc ('\n', stderr);
7212 #endif
7213 bc_expand_expr (TREE_OPERAND (exp, 1));
7214 lab1 = bc_get_bytecode_label ();
7215 bc_emit_bytecode (jump);
7216 bc_emit_bytecode_labelref (lab1);
7217
7218 #ifdef DEBUG_PRINT_CODE
7219 fputc ('\n', stderr);
7220 #endif
7221
7222 bc_emit_bytecode_labeldef (lab);
7223 bc_expand_expr (TREE_OPERAND (exp, 2));
7224 bc_emit_bytecode_labeldef (lab1);
7225 return;
7226
7227 case TRUTH_ANDIF_EXPR:
7228
7229 opcode = xjumpifnot;
7230 goto andorif;
7231
7232 case TRUTH_ORIF_EXPR:
7233
7234 opcode = xjumpif;
7235 goto andorif;
7236
7237 case PLUS_EXPR:
7238
7239 binoptab = optab_plus_expr;
7240 goto binop;
7241
7242 case MINUS_EXPR:
7243
7244 binoptab = optab_minus_expr;
7245 goto binop;
7246
7247 case MULT_EXPR:
7248
7249 binoptab = optab_mult_expr;
7250 goto binop;
7251
7252 case TRUNC_DIV_EXPR:
7253 case FLOOR_DIV_EXPR:
7254 case CEIL_DIV_EXPR:
7255 case ROUND_DIV_EXPR:
7256 case EXACT_DIV_EXPR:
7257
7258 binoptab = optab_trunc_div_expr;
7259 goto binop;
7260
7261 case TRUNC_MOD_EXPR:
7262 case FLOOR_MOD_EXPR:
7263 case CEIL_MOD_EXPR:
7264 case ROUND_MOD_EXPR:
7265
7266 binoptab = optab_trunc_mod_expr;
7267 goto binop;
7268
7269 case FIX_ROUND_EXPR:
7270 case FIX_FLOOR_EXPR:
7271 case FIX_CEIL_EXPR:
7272 abort (); /* Not used for C. */
7273
7274 case FIX_TRUNC_EXPR:
7275 case FLOAT_EXPR:
7276 case MAX_EXPR:
7277 case MIN_EXPR:
7278 case FFS_EXPR:
7279 case LROTATE_EXPR:
7280 case RROTATE_EXPR:
7281 abort (); /* FIXME */
7282
7283 case RDIV_EXPR:
7284
7285 binoptab = optab_rdiv_expr;
7286 goto binop;
7287
7288 case BIT_AND_EXPR:
7289
7290 binoptab = optab_bit_and_expr;
7291 goto binop;
7292
7293 case BIT_IOR_EXPR:
7294
7295 binoptab = optab_bit_ior_expr;
7296 goto binop;
7297
7298 case BIT_XOR_EXPR:
7299
7300 binoptab = optab_bit_xor_expr;
7301 goto binop;
7302
7303 case LSHIFT_EXPR:
7304
7305 binoptab = optab_lshift_expr;
7306 goto binop;
7307
7308 case RSHIFT_EXPR:
7309
7310 binoptab = optab_rshift_expr;
7311 goto binop;
7312
7313 case TRUTH_AND_EXPR:
7314
7315 binoptab = optab_truth_and_expr;
7316 goto binop;
7317
7318 case TRUTH_OR_EXPR:
7319
7320 binoptab = optab_truth_or_expr;
7321 goto binop;
7322
7323 case LT_EXPR:
7324
7325 binoptab = optab_lt_expr;
7326 goto binop;
7327
7328 case LE_EXPR:
7329
7330 binoptab = optab_le_expr;
7331 goto binop;
7332
7333 case GE_EXPR:
7334
7335 binoptab = optab_ge_expr;
7336 goto binop;
7337
7338 case GT_EXPR:
7339
7340 binoptab = optab_gt_expr;
7341 goto binop;
7342
7343 case EQ_EXPR:
7344
7345 binoptab = optab_eq_expr;
7346 goto binop;
7347
7348 case NE_EXPR:
7349
7350 binoptab = optab_ne_expr;
7351 goto binop;
7352
7353 case NEGATE_EXPR:
7354
7355 unoptab = optab_negate_expr;
7356 goto unop;
7357
7358 case BIT_NOT_EXPR:
7359
7360 unoptab = optab_bit_not_expr;
7361 goto unop;
7362
7363 case TRUTH_NOT_EXPR:
7364
7365 unoptab = optab_truth_not_expr;
7366 goto unop;
7367
7368 case PREDECREMENT_EXPR:
7369
7370 incroptab = optab_predecrement_expr;
7371 goto increment;
7372
7373 case PREINCREMENT_EXPR:
7374
7375 incroptab = optab_preincrement_expr;
7376 goto increment;
7377
7378 case POSTDECREMENT_EXPR:
7379
7380 incroptab = optab_postdecrement_expr;
7381 goto increment;
7382
7383 case POSTINCREMENT_EXPR:
7384
7385 incroptab = optab_postincrement_expr;
7386 goto increment;
7387
7388 case CONSTRUCTOR:
7389
7390 bc_expand_constructor (exp);
7391 return;
7392
7393 case ERROR_MARK:
7394 case RTL_EXPR:
7395
7396 return;
7397
7398 case BIND_EXPR:
7399 {
7400 tree vars = TREE_OPERAND (exp, 0);
7401 int vars_need_expansion = 0;
7402
7403 /* Need to open a binding contour here because
7404 if there are any cleanups they must be contained here. */
7405 expand_start_bindings (0);
7406
7407 /* Mark the corresponding BLOCK for output. */
7408 if (TREE_OPERAND (exp, 2) != 0)
7409 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7410
7411 /* If VARS have not yet been expanded, expand them now. */
7412 while (vars)
7413 {
7414 if (DECL_RTL (vars) == 0)
7415 {
7416 vars_need_expansion = 1;
7417 expand_decl (vars);
7418 }
7419 expand_decl_init (vars);
7420 vars = TREE_CHAIN (vars);
7421 }
7422
7423 bc_expand_expr (TREE_OPERAND (exp, 1));
7424
7425 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7426
7427 return;
7428 }
7429 }
7430
7431 abort ();
7432
7433 binop:
7434
7435 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7436 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7437 return;
7438
7439
7440 unop:
7441
7442 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7443 return;
7444
7445
7446 andorif:
7447
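/* A sketch of the bytecode emitted for E1 && E2 (OPCODE is
   xjumpifnot; for E1 || E2 it is xjumpif):

       <E1 as truth value>  duplicate  <OPCODE> L
       drop
       <E2 as truth value>
   L:

   leaving the short-circuited truth value on the stack.  */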
7448 bc_expand_expr (TREE_OPERAND (exp, 0));
7449 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7450 lab = bc_get_bytecode_label ();
7451
7452 bc_emit_instruction (duplicate);
7453 bc_emit_bytecode (opcode);
7454 bc_emit_bytecode_labelref (lab);
7455
7456 #ifdef DEBUG_PRINT_CODE
7457 fputc ('\n', stderr);
7458 #endif
7459
7460 bc_emit_instruction (drop);
7461
7462 bc_expand_expr (TREE_OPERAND (exp, 1));
7463 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7464 bc_emit_bytecode_labeldef (lab);
7465 return;
7466
7467
7468 increment:
7469
7470 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7471
7472 /* Push the quantum. */
7473 bc_expand_expr (TREE_OPERAND (exp, 1));
7474
7475 /* Convert it to the lvalue's type. */
7476 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7477
7478 /* Push the address of the lvalue */
7479 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7480
7481 /* Perform actual increment */
7482 bc_expand_increment (incroptab, type);
7483 return;
7484 }
7485 \f
7486 /* Return the alignment in bits of EXP, a pointer valued expression.
7487 But don't return more than MAX_ALIGN no matter what.
7488 The alignment returned is, by default, the alignment of the thing that
7489 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7490
7491 Otherwise, look at the expression to see if we can do better, i.e., if the
7492 expression is actually pointing at an object whose alignment is tighter. */
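/* For example (illustrative only): given

       char buf[64] __attribute__ ((aligned (8)));

   the expression (int *) (buf + 2) can be promised no more than
   2-byte (16-bit) alignment: the PLUS_EXPR case below shrinks
   MAX_ALIGN to the largest power of two dividing the offset, and
   the ADDR_EXPR case then takes the MIN with DECL_ALIGN.  */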
7493
7494 static int
7495 get_pointer_alignment (exp, max_align)
7496 tree exp;
7497 unsigned max_align;
7498 {
7499 unsigned align, inner;
7500
7501 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7502 return 0;
7503
7504 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7505 align = MIN (align, max_align);
7506
7507 while (1)
7508 {
7509 switch (TREE_CODE (exp))
7510 {
7511 case NOP_EXPR:
7512 case CONVERT_EXPR:
7513 case NON_LVALUE_EXPR:
7514 exp = TREE_OPERAND (exp, 0);
7515 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7516 return align;
7517 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7518 align = MIN (inner, max_align);
7519 break;
7520
7521 case PLUS_EXPR:
7522 /* If sum of pointer + int, restrict our maximum alignment to that
7523 imposed by the integer. If not, we can't do any better than
7524 ALIGN. */
7525 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7526 return align;
7527
7528 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7529 & (max_align - 1))
7530 != 0)
7531 max_align >>= 1;
7532
7533 exp = TREE_OPERAND (exp, 0);
7534 break;
7535
7536 case ADDR_EXPR:
7537 /* See what we are pointing at and look at its alignment. */
7538 exp = TREE_OPERAND (exp, 0);
7539 if (TREE_CODE (exp) == FUNCTION_DECL)
7540 align = FUNCTION_BOUNDARY;
7541 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7542 align = DECL_ALIGN (exp);
7543 #ifdef CONSTANT_ALIGNMENT
7544 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7545 align = CONSTANT_ALIGNMENT (exp, align);
7546 #endif
7547 return MIN (align, max_align);
7548
7549 default:
7550 return align;
7551 }
7552 }
7553 }
7554 \f
7555 /* Return the tree node and offset if a given argument corresponds to
7556 a string constant. */
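/* For example: an ADDR_EXPR of the STRING_CST "hello" yields that
   STRING_CST with *PTR_OFFSET = integer_zero_node; a PLUS_EXPR of
   such an address and an offset N yields the STRING_CST with
   *PTR_OFFSET = N; anything else yields 0.  */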
7557
7558 static tree
7559 string_constant (arg, ptr_offset)
7560 tree arg;
7561 tree *ptr_offset;
7562 {
7563 STRIP_NOPS (arg);
7564
7565 if (TREE_CODE (arg) == ADDR_EXPR
7566 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7567 {
7568 *ptr_offset = integer_zero_node;
7569 return TREE_OPERAND (arg, 0);
7570 }
7571 else if (TREE_CODE (arg) == PLUS_EXPR)
7572 {
7573 tree arg0 = TREE_OPERAND (arg, 0);
7574 tree arg1 = TREE_OPERAND (arg, 1);
7575
7576 STRIP_NOPS (arg0);
7577 STRIP_NOPS (arg1);
7578
7579 if (TREE_CODE (arg0) == ADDR_EXPR
7580 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7581 {
7582 *ptr_offset = arg1;
7583 return TREE_OPERAND (arg0, 0);
7584 }
7585 else if (TREE_CODE (arg1) == ADDR_EXPR
7586 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7587 {
7588 *ptr_offset = arg0;
7589 return TREE_OPERAND (arg1, 0);
7590 }
7591 }
7592
7593 return 0;
7594 }
7595
7596 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7597 answer, because the array could contain a zero byte in the middle;
7598 TREE_STRING_LENGTH is the size of the character array, not the string.
7599
7600 Unfortunately, string_constant can't access the values of const char
7601 arrays with initializers, so neither can we do so here. */
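/* Illustrative cases: c_strlen of "hello" is size_int (5), while
   c_strlen of "foo\0bar" plus a non-constant offset is 0 (unknown),
   because the embedded zero byte makes the length depend on where
   the search would start.  */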
7602
7603 static tree
7604 c_strlen (src)
7605 tree src;
7606 {
7607 tree offset_node;
7608 int offset, max;
7609 char *ptr;
7610
7611 src = string_constant (src, &offset_node);
7612 if (src == 0)
7613 return 0;
7614 max = TREE_STRING_LENGTH (src);
7615 ptr = TREE_STRING_POINTER (src);
7616 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7617 {
7618 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7619 compute the offset to the following null if we don't know where to
7620 start searching for it. */
7621 int i;
7622 for (i = 0; i < max; i++)
7623 if (ptr[i] == 0)
7624 return 0;
7625 /* We don't know the starting offset, but we do know that the string
7626 has no internal zero bytes. We can assume that the offset falls
7627 within the bounds of the string; otherwise, the programmer deserves
7628 what he gets. Subtract the offset from the length of the string,
7629 and return that. */
7630 /* This would perhaps not be valid if we were dealing with named
7631 arrays in addition to literal string constants. */
7632 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7633 }
7634
7635 /* We have a known offset into the string. Start searching there for
7636 a null character. */
7637 if (offset_node == 0)
7638 offset = 0;
7639 else
7640 {
7641 /* Did we get a long long offset? If so, punt. */
7642 if (TREE_INT_CST_HIGH (offset_node) != 0)
7643 return 0;
7644 offset = TREE_INT_CST_LOW (offset_node);
7645 }
7646 /* If the offset is known to be out of bounds, warn, and call strlen at
7647 runtime. */
7648 if (offset < 0 || offset > max)
7649 {
7650 warning ("offset outside bounds of constant string");
7651 return 0;
7652 }
7653 /* Use strlen to search for the first zero byte. Since any strings
7654 constructed with build_string will have nulls appended, we win even
7655 if we get handed something like (char[4])"abcd".
7656
7657 Since OFFSET is our starting index into the string, no further
7658 calculation is needed. */
7659 return size_int (strlen (ptr + offset));
7660 }
7661
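/* Return an rtx for the return address of the frame COUNT frames up
   from the frame whose address is TEM; the caller below passes
   hard_frame_pointer_rtx for TEM.  For BUILT_IN_FRAME_ADDRESS,
   return the address of that frame itself instead.  */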
7662 rtx
7663 expand_builtin_return_addr (fndecl_code, count, tem)
7664 enum built_in_function fndecl_code;
7665 int count;
7666 rtx tem;
7667 {
7668 int i;
7669
7670 /* Some machines need special handling before we can access
7671 arbitrary frames. For example, on the sparc, we must first flush
7672 all register windows to the stack. */
7673 #ifdef SETUP_FRAME_ADDRESSES
7674 SETUP_FRAME_ADDRESSES ();
7675 #endif
7676
7677 /* On the sparc, the return address is not in the frame, it is in a
7678 register. There is no way to access it off of the current frame
7679 pointer, but it can be accessed off the previous frame pointer by
7680 reading the value from the register window save area. */
7681 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7682 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7683 count--;
7684 #endif
7685
7686 /* Scan back COUNT frames to the specified frame. */
7687 for (i = 0; i < count; i++)
7688 {
7689 /* Assume the dynamic chain pointer is in the word that the
7690 frame address points to, unless otherwise specified. */
7691 #ifdef DYNAMIC_CHAIN_ADDRESS
7692 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7693 #endif
7694 tem = memory_address (Pmode, tem);
7695 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7696 }
7697
7698 /* For __builtin_frame_address, return what we've got. */
7699 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7700 return tem;
7701
7702 /* For __builtin_return_address, get the return address from that
7703 frame. */
7704 #ifdef RETURN_ADDR_RTX
7705 tem = RETURN_ADDR_RTX (count, tem);
7706 #else
7707 tem = memory_address (Pmode,
7708 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7709 tem = gen_rtx (MEM, Pmode, tem);
7710 #endif
7711 return tem;
7712 }
7713 \f
7714 /* Expand an expression EXP that calls a built-in function,
7715 with result going to TARGET if that's convenient
7716 (and in mode MODE if that's convenient).
7717 SUBTARGET may be used as the target for computing one of EXP's operands.
7718 IGNORE is nonzero if the value is to be ignored. */
7719
7720 #define CALLED_AS_BUILT_IN(NODE) \
7721 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
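/* For example, CALLED_AS_BUILT_IN holds for `__builtin_strlen' but
   not for plain `strlen', so an explicitly requested builtin form
   is still open-coded below even when not optimizing.  */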
7722
7723 static rtx
7724 expand_builtin (exp, target, subtarget, mode, ignore)
7725 tree exp;
7726 rtx target;
7727 rtx subtarget;
7728 enum machine_mode mode;
7729 int ignore;
7730 {
7731 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7732 tree arglist = TREE_OPERAND (exp, 1);
7733 rtx op0;
7734 rtx lab1, insns;
7735 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7736 optab builtin_optab;
7737
7738 switch (DECL_FUNCTION_CODE (fndecl))
7739 {
7740 case BUILT_IN_ABS:
7741 case BUILT_IN_LABS:
7742 case BUILT_IN_FABS:
7743 /* build_function_call changes these into ABS_EXPR. */
7744 abort ();
7745
7746 case BUILT_IN_SIN:
7747 case BUILT_IN_COS:
7748 /* Treat these like sqrt, but only if the user asks for them. */
7749 if (! flag_fast_math)
7750 break;
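/* Otherwise fall through and expand sin/cos like sqrt.  */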
7751 case BUILT_IN_FSQRT:
7752 /* If not optimizing, call the library function. */
7753 if (! optimize)
7754 break;
7755
7756 if (arglist == 0
7757 /* Arg could be wrong type if user redeclared this fcn wrong. */
7758 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7759 break;
7760
7761 /* Stabilize and compute the argument. */
7762 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7763 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7764 {
7765 exp = copy_node (exp);
7766 arglist = copy_node (arglist);
7767 TREE_OPERAND (exp, 1) = arglist;
7768 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7769 }
7770 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7771
7772 /* Make a suitable register to place result in. */
7773 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7774
7775 emit_queue ();
7776 start_sequence ();
7777
7778 switch (DECL_FUNCTION_CODE (fndecl))
7779 {
7780 case BUILT_IN_SIN:
7781 builtin_optab = sin_optab; break;
7782 case BUILT_IN_COS:
7783 builtin_optab = cos_optab; break;
7784 case BUILT_IN_FSQRT:
7785 builtin_optab = sqrt_optab; break;
7786 default:
7787 abort ();
7788 }
7789
7790 /* Compute into TARGET.
7791 Set TARGET to wherever the result comes back. */
7792 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7793 builtin_optab, op0, target, 0);
7794
7795 /* If we were unable to expand via the builtin, stop the
7796 sequence (without outputting the insns) and break, causing
7797 a call to the library function. */
7798 if (target == 0)
7799 {
7800 end_sequence ();
7801 break;
7802 }
7803
7804 /* Check the results by default. But if flag_fast_math is turned on,
7805 then assume sqrt will always be called with valid arguments. */
7806
7807 if (! flag_fast_math)
7808 {
7809 /* Don't define the builtin FP instructions
7810 if your machine is not IEEE. */
7811 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7812 abort ();
7813
7814 lab1 = gen_label_rtx ();
7815
7816 /* Test the result; if it is NaN, set errno=EDOM because
7817 the argument was not in the domain. */
7818 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7819 emit_jump_insn (gen_beq (lab1));
7820
7821 #ifdef TARGET_EDOM
7822 {
7823 #ifdef GEN_ERRNO_RTX
7824 rtx errno_rtx = GEN_ERRNO_RTX;
7825 #else
7826 rtx errno_rtx
7827 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7828 #endif
7829
7830 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7831 }
7832 #else
7833 /* We can't set errno=EDOM directly; let the library call do it.
7834 Pop the arguments right away in case the call gets deleted. */
7835 NO_DEFER_POP;
7836 expand_call (exp, target, 0);
7837 OK_DEFER_POP;
7838 #endif
7839
7840 emit_label (lab1);
7841 }
7842
7843 /* Output the entire sequence. */
7844 insns = get_insns ();
7845 end_sequence ();
7846 emit_insns (insns);
7847
7848 return target;
7849
7850 /* __builtin_apply_args returns block of memory allocated on
7851 the stack into which is stored the arg pointer, structure
7852 value address, static chain, and all the registers that might
7853 possibly be used in performing a function call. The code is
7854 moved to the start of the function so the incoming values are
7855 saved. */
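/* A sketch of the block layout (sizes are machine-dependent; see
   apply_args_size below for the authoritative computation):

       offset 0:   incoming arg pointer
       next word:  structure value address, if the machine uses one
       then:       each argument register, each padded to the
                   natural alignment of the mode it is saved in.  */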
7856 case BUILT_IN_APPLY_ARGS:
7857 /* Don't do __builtin_apply_args more than once in a function.
7858 Save the result of the first call and reuse it. */
7859 if (apply_args_value != 0)
7860 return apply_args_value;
7861 {
7862 /* When this function is called, it means that registers must be
7863 saved on entry to this function. So we migrate the
7864 call to the first insn of this function. */
7865 rtx temp;
7866 rtx seq;
7867
7868 start_sequence ();
7869 temp = expand_builtin_apply_args ();
7870 seq = get_insns ();
7871 end_sequence ();
7872
7873 apply_args_value = temp;
7874
7875 /* Put the sequence after the NOTE that starts the function.
7876 If this is inside a SEQUENCE, make the outer-level insn
7877 chain current, so the code is placed at the start of the
7878 function. */
7879 push_topmost_sequence ();
7880 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7881 pop_topmost_sequence ();
7882 return temp;
7883 }
7884
7885 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7886 FUNCTION with a copy of the parameters described by
7887 ARGUMENTS, and ARGSIZE. It returns a block of memory
7888 allocated on the stack into which is stored all the registers
7889 that might possibly be used for returning the result of a
7890 function. ARGUMENTS is the value returned by
7891 __builtin_apply_args. ARGSIZE is the number of bytes of
7892 arguments that must be copied. ??? How should this value be
7893 computed? We'll also need a safe worst case value for varargs
7894 functions. */
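/* A hypothetical user-level use of the three builtins together, as
   in a forwarding stub (user code, not part of this file; OTHER_FN
   and the worst-case argument-block size 64 are placeholders):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) other_fn, args, 64);
       __builtin_return (result);  */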
7895 case BUILT_IN_APPLY:
7896 if (arglist == 0
7897 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7898 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7899 || TREE_CHAIN (arglist) == 0
7900 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7901 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7902 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7903 return const0_rtx;
7904 else
7905 {
7906 int i;
7907 tree t;
7908 rtx ops[3];
7909
7910 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7911 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7912
7913 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7914 }
7915
7916 /* __builtin_return (RESULT) causes the function to return the
7917 value described by RESULT. RESULT is address of the block of
7918 memory returned by __builtin_apply. */
7919 case BUILT_IN_RETURN:
7920 if (arglist
7921 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7922 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7923 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7924 NULL_RTX, VOIDmode, 0));
7925 return const0_rtx;
7926
7927 case BUILT_IN_SAVEREGS:
7928 /* Don't do __builtin_saveregs more than once in a function.
7929 Save the result of the first call and reuse it. */
7930 if (saveregs_value != 0)
7931 return saveregs_value;
7932 {
7933 /* When this function is called, it means that registers must be
7934 saved on entry to this function. So we migrate the
7935 call to the first insn of this function. */
7936 rtx temp;
7937 rtx seq;
7938
7939 /* Now really call the function. `expand_call' does not call
7940 expand_builtin, so there is no danger of infinite recursion here. */
7941 start_sequence ();
7942
7943 #ifdef EXPAND_BUILTIN_SAVEREGS
7944 /* Do whatever the machine needs done in this case. */
7945 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7946 #else
7947 /* The register where the function returns its value
7948 is likely to have something else in it, such as an argument.
7949 So preserve that register around the call. */
7950
7951 if (value_mode != VOIDmode)
7952 {
7953 rtx valreg = hard_libcall_value (value_mode);
7954 rtx saved_valreg = gen_reg_rtx (value_mode);
7955
7956 emit_move_insn (saved_valreg, valreg);
7957 temp = expand_call (exp, target, ignore);
7958 emit_move_insn (valreg, saved_valreg);
7959 }
7960 else
7961 /* Generate the call, putting the value in a pseudo. */
7962 temp = expand_call (exp, target, ignore);
7963 #endif
7964
7965 seq = get_insns ();
7966 end_sequence ();
7967
7968 saveregs_value = temp;
7969
7970 /* Put the sequence after the NOTE that starts the function.
7971 If this is inside a SEQUENCE, make the outer-level insn
7972 chain current, so the code is placed at the start of the
7973 function. */
7974 push_topmost_sequence ();
7975 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7976 pop_topmost_sequence ();
7977 return temp;
7978 }
7979
7980 /* __builtin_args_info (N) returns word N of the arg space info
7981 for the current function. The number and meanings of words
7982 is controlled by the definition of CUMULATIVE_ARGS. */
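/* E.g., __builtin_args_info (0) expands to the constant found in
   the first int-sized word of current_function_args_info.  */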
7983 case BUILT_IN_ARGS_INFO:
7984 {
7985 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7986 int i;
7987 int *word_ptr = (int *) &current_function_args_info;
7988 tree type, elts, result;
7989
7990 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7991 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7992 __FILE__, __LINE__);
7993
7994 if (arglist != 0)
7995 {
7996 tree arg = TREE_VALUE (arglist);
7997 if (TREE_CODE (arg) != INTEGER_CST)
7998 error ("argument of `__builtin_args_info' must be constant");
7999 else
8000 {
8001 int wordnum = TREE_INT_CST_LOW (arg);
8002
8003 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8004 error ("argument of `__builtin_args_info' out of range");
8005 else
8006 return GEN_INT (word_ptr[wordnum]);
8007 }
8008 }
8009 else
8010 error ("missing argument in `__builtin_args_info'");
8011
8012 return const0_rtx;
8013
8014 #if 0
8015 for (elts = NULL_TREE, i = 0; i < nwords; i++)
8016 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8017
8018 type = build_array_type (integer_type_node,
8019 build_index_type (build_int_2 (nwords, 0)));
8020 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8021 TREE_CONSTANT (result) = 1;
8022 TREE_STATIC (result) = 1;
8023 result = build (INDIRECT_REF, build_pointer_type (type), result);
8024 TREE_CONSTANT (result) = 1;
8025 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8026 #endif
8027 }
8028
8029 /* Return the address of the first anonymous stack arg. */
8030 case BUILT_IN_NEXT_ARG:
8031 {
8032 tree fntype = TREE_TYPE (current_function_decl);
8033
8034 if ((TYPE_ARG_TYPES (fntype) == 0
8035 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8036 == void_type_node))
8037 && ! current_function_varargs)
8038 {
8039 error ("`va_start' used in function with fixed args");
8040 return const0_rtx;
8041 }
8042
8043 if (arglist)
8044 {
8045 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8046 tree arg = TREE_VALUE (arglist);
8047
8048 /* Strip off all nops for the sake of the comparison. This
8049 is not quite the same as STRIP_NOPS. It does more.
8050 We must also strip off INDIRECT_REF for C++ reference
8051 parameters. */
8052 while (TREE_CODE (arg) == NOP_EXPR
8053 || TREE_CODE (arg) == CONVERT_EXPR
8054 || TREE_CODE (arg) == NON_LVALUE_EXPR
8055 || TREE_CODE (arg) == INDIRECT_REF)
8056 arg = TREE_OPERAND (arg, 0);
8057 if (arg != last_parm)
8058 warning ("second parameter of `va_start' not last named argument");
8059 }
8060 else if (! current_function_varargs)
8061 /* Evidently an out of date version of <stdarg.h>; can't validate
8062 va_start's second argument, but can still work as intended. */
8063 warning ("`__builtin_next_arg' called without an argument");
8064 }
8065
8066 return expand_binop (Pmode, add_optab,
8067 current_function_internal_arg_pointer,
8068 current_function_arg_offset_rtx,
8069 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8070
8071 case BUILT_IN_CLASSIFY_TYPE:
8072 if (arglist != 0)
8073 {
8074 tree type = TREE_TYPE (TREE_VALUE (arglist));
8075 enum tree_code code = TREE_CODE (type);
8076 if (code == VOID_TYPE)
8077 return GEN_INT (void_type_class);
8078 if (code == INTEGER_TYPE)
8079 return GEN_INT (integer_type_class);
8080 if (code == CHAR_TYPE)
8081 return GEN_INT (char_type_class);
8082 if (code == ENUMERAL_TYPE)
8083 return GEN_INT (enumeral_type_class);
8084 if (code == BOOLEAN_TYPE)
8085 return GEN_INT (boolean_type_class);
8086 if (code == POINTER_TYPE)
8087 return GEN_INT (pointer_type_class);
8088 if (code == REFERENCE_TYPE)
8089 return GEN_INT (reference_type_class);
8090 if (code == OFFSET_TYPE)
8091 return GEN_INT (offset_type_class);
8092 if (code == REAL_TYPE)
8093 return GEN_INT (real_type_class);
8094 if (code == COMPLEX_TYPE)
8095 return GEN_INT (complex_type_class);
8096 if (code == FUNCTION_TYPE)
8097 return GEN_INT (function_type_class);
8098 if (code == METHOD_TYPE)
8099 return GEN_INT (method_type_class);
8100 if (code == RECORD_TYPE)
8101 return GEN_INT (record_type_class);
8102 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8103 return GEN_INT (union_type_class);
8104 if (code == ARRAY_TYPE)
8105 {
8106 if (TYPE_STRING_FLAG (type))
8107 return GEN_INT (string_type_class);
8108 else
8109 return GEN_INT (array_type_class);
8110 }
8111 if (code == SET_TYPE)
8112 return GEN_INT (set_type_class);
8113 if (code == FILE_TYPE)
8114 return GEN_INT (file_type_class);
8115 if (code == LANG_TYPE)
8116 return GEN_INT (lang_type_class);
8117 }
8118 return GEN_INT (no_type_class);
8119
8120 case BUILT_IN_CONSTANT_P:
8121 if (arglist == 0)
8122 return const0_rtx;
8123 else
8124 {
8125 tree arg = TREE_VALUE (arglist);
8126
8127 STRIP_NOPS (arg);
8128 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8129 || (TREE_CODE (arg) == ADDR_EXPR
8130 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8131 ? const1_rtx : const0_rtx);
8132 }
8133
8134 case BUILT_IN_FRAME_ADDRESS:
8135 /* The argument must be a nonnegative integer constant.
8136 It counts the number of frames to scan up the stack.
8137 The value is the address of that frame. */
8138 case BUILT_IN_RETURN_ADDRESS:
8139 /* The argument must be a nonnegative integer constant.
8140 It counts the number of frames to scan up the stack.
8141 The value is the return address saved in that frame. */
8142 if (arglist == 0)
8143 /* Warning about missing arg was already issued. */
8144 return const0_rtx;
8145 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8146 {
8147 error ("invalid arg to `__builtin_return_address'");
8148 return const0_rtx;
8149 }
8150 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8151 {
8152 error ("invalid arg to `__builtin_return_address'");
8153 return const0_rtx;
8154 }
8155 else
8156 {
8157 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8158 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8159 hard_frame_pointer_rtx);
8160
8161 /* For __builtin_frame_address, return what we've got. */
8162 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8163 return tem;
8164
8165 if (GET_CODE (tem) != REG)
8166 tem = copy_to_reg (tem);
8167 return tem;
8168 }
8169
8170 case BUILT_IN_ALLOCA:
8171 if (arglist == 0
8172 /* Arg could be non-integer if user redeclared this fcn wrong. */
8173 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8174 break;
8175
8176 /* Compute the argument. */
8177 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8178
8179 /* Allocate the desired space. */
8180 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8181
8182 case BUILT_IN_FFS:
8183 /* If not optimizing, call the library function. */
8184 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8185 break;
8186
8187 if (arglist == 0
8188 /* Arg could be non-integer if user redeclared this fcn wrong. */
8189 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8190 break;
8191
8192 /* Compute the argument. */
8193 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8194 /* Compute ffs, into TARGET if possible.
8195 Set TARGET to wherever the result comes back. */
8196 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8197 ffs_optab, op0, target, 1);
8198 if (target == 0)
8199 abort ();
8200 return target;
8201
8202 case BUILT_IN_STRLEN:
8203 /* If not optimizing, call the library function. */
8204 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8205 break;
8206
8207 if (arglist == 0
8208 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8209 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8210 break;
8211 else
8212 {
8213 tree src = TREE_VALUE (arglist);
8214 tree len = c_strlen (src);
8215
8216 int align
8217 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8218
8219 rtx result, src_rtx, char_rtx;
8220 enum machine_mode insn_mode = value_mode, char_mode;
8221 enum insn_code icode;
8222
8223 /* If the length is known, just return it. */
8224 if (len != 0)
8225 return expand_expr (len, target, mode, 0);
8226
8227 /* If SRC is not a pointer type, don't do this operation inline. */
8228 if (align == 0)
8229 break;
8230
8231 /* Call a function if we can't compute strlen in the right mode. */
8232
8233 while (insn_mode != VOIDmode)
8234 {
8235 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8236 if (icode != CODE_FOR_nothing)
8237 break;
8238
8239 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8240 }
8241 if (insn_mode == VOIDmode)
8242 break;
8243
8244 /* Make a place to write the result of the instruction. */
8245 result = target;
8246 if (! (result != 0
8247 && GET_CODE (result) == REG
8248 && GET_MODE (result) == insn_mode
8249 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8250 result = gen_reg_rtx (insn_mode);
8251
8252 /* Make sure the operands are acceptable to the predicates. */
8253
8254 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8255 result = gen_reg_rtx (insn_mode);
8256
8257 src_rtx = memory_address (BLKmode,
8258 expand_expr (src, NULL_RTX, ptr_mode,
8259 EXPAND_NORMAL));
8260 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8261 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8262
8263 char_rtx = const0_rtx;
8264 char_mode = insn_operand_mode[(int)icode][2];
8265 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8266 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8267
8268 emit_insn (GEN_FCN (icode) (result,
8269 gen_rtx (MEM, BLKmode, src_rtx),
8270 char_rtx, GEN_INT (align)));
8271
8272 /* Return the value in the proper mode for this function. */
8273 if (GET_MODE (result) == value_mode)
8274 return result;
8275 else if (target != 0)
8276 {
8277 convert_move (target, result, 0);
8278 return target;
8279 }
8280 else
8281 return convert_to_mode (value_mode, result, 0);
8282 }
8283
8284 case BUILT_IN_STRCPY:
8285 /* If not optimizing, call the library function. */
8286 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8287 break;
8288
8289 if (arglist == 0
8290 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8291 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8292 || TREE_CHAIN (arglist) == 0
8293 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8294 break;
8295 else
8296 {
8297 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8298
8299 if (len == 0)
8300 break;
8301
8302 len = size_binop (PLUS_EXPR, len, integer_one_node);
8303
8304 chainon (arglist, build_tree_list (NULL_TREE, len));
8305 }
8306
8307 /* Falls through to the memcpy handler. */
8308 case BUILT_IN_MEMCPY:
8309 /* If not optimizing, call the library function. */
8310 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8311 break;
8312
8313 if (arglist == 0
8314 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8315 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8316 || TREE_CHAIN (arglist) == 0
8317 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8318 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8319 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8320 break;
8321 else
8322 {
8323 tree dest = TREE_VALUE (arglist);
8324 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8325 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8326 tree type;
8327
8328 int src_align
8329 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8330 int dest_align
8331 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8332 rtx dest_rtx, dest_mem, src_mem;
8333
8334 /* If either SRC or DEST is not a pointer type, don't do
8335 this operation in-line. */
8336 if (src_align == 0 || dest_align == 0)
8337 {
8338 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8339 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8340 break;
8341 }
8342
8343 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8344 dest_mem = gen_rtx (MEM, BLKmode,
8345 memory_address (BLKmode, dest_rtx));
8346 /* There could be a void* cast on top of the object. */
8347 while (TREE_CODE (dest) == NOP_EXPR)
8348 dest = TREE_OPERAND (dest, 0);
8349 type = TREE_TYPE (TREE_TYPE (dest));
8350 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8351 src_mem = gen_rtx (MEM, BLKmode,
8352 memory_address (BLKmode,
8353 expand_expr (src, NULL_RTX,
8354 ptr_mode,
8355 EXPAND_SUM)));
8356 /* There could be a void* cast on top of the object. */
8357 while (TREE_CODE (src) == NOP_EXPR)
8358 src = TREE_OPERAND (src, 0);
8359 type = TREE_TYPE (TREE_TYPE (src));
8360 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8361
8362 /* Copy word part most expediently. */
8363 emit_block_move (dest_mem, src_mem,
8364 expand_expr (len, NULL_RTX, VOIDmode, 0),
8365 MIN (src_align, dest_align));
8366 return force_operand (dest_rtx, NULL_RTX);
8367 }
8368
8369 /* These comparison functions need an instruction that returns an actual
8370 index. An ordinary compare that just sets the condition codes
8371 is not enough. */
8372 #ifdef HAVE_cmpstrsi
8373 case BUILT_IN_STRCMP:
8374 /* If not optimizing, call the library function. */
8375 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8376 break;
8377
8378 if (arglist == 0
8379 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8380 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8381 || TREE_CHAIN (arglist) == 0
8382 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8383 break;
8384 else if (!HAVE_cmpstrsi)
8385 break;
8386 {
8387 tree arg1 = TREE_VALUE (arglist);
8388 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8389 tree offset;
8390 tree len, len2;
8391
8392 len = c_strlen (arg1);
8393 if (len)
8394 len = size_binop (PLUS_EXPR, integer_one_node, len);
8395 len2 = c_strlen (arg2);
8396 if (len2)
8397 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8398
8399 /* If we don't have a constant length for the first, use the length
8400 of the second, if we know it. We don't require a constant for
8401 this case; some cost analysis could be done if both are available
8402 but neither is constant. For now, assume they're equally cheap.
8403
8404 If both strings have constant lengths, use the smaller. This
8405 could arise if optimization results in strcmp being called with
8406 two fixed strings, or if the code was machine-generated. We should
8407 add some code to the `memcmp' handler below to deal with such
8408 situations, someday. */
8409 if (!len || TREE_CODE (len) != INTEGER_CST)
8410 {
8411 if (len2)
8412 len = len2;
8413 else if (len == 0)
8414 break;
8415 }
8416 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8417 {
8418 if (tree_int_cst_lt (len2, len))
8419 len = len2;
8420 }
8421
8422 chainon (arglist, build_tree_list (NULL_TREE, len));
8423 }
8424
8425 /* Falls through to the memcmp handler. */
8426 case BUILT_IN_MEMCMP:
8427 /* If not optimizing, call the library function. */
8428 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8429 break;
8430
8431 if (arglist == 0
8432 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8433 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8434 || TREE_CHAIN (arglist) == 0
8435 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8436 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8437 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8438 break;
8439 else if (!HAVE_cmpstrsi)
8440 break;
8441 {
8442 tree arg1 = TREE_VALUE (arglist);
8443 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8444 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8445 rtx result;
8446
8447 int arg1_align
8448 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8449 int arg2_align
8450 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8451 enum machine_mode insn_mode
8452 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8453
8454 /* If we don't have POINTER_TYPE, call the function. */
8455 if (arg1_align == 0 || arg2_align == 0)
8456 {
8457 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8458 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8459 break;
8460 }
8461
8462 /* Make a place to write the result of the instruction. */
8463 result = target;
8464 if (! (result != 0
8465 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8466 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8467 result = gen_reg_rtx (insn_mode);
8468
8469 emit_insn (gen_cmpstrsi (result,
8470 gen_rtx (MEM, BLKmode,
8471 expand_expr (arg1, NULL_RTX,
8472 ptr_mode,
8473 EXPAND_NORMAL)),
8474 gen_rtx (MEM, BLKmode,
8475 expand_expr (arg2, NULL_RTX,
8476 ptr_mode,
8477 EXPAND_NORMAL)),
8478 expand_expr (len, NULL_RTX, VOIDmode, 0),
8479 GEN_INT (MIN (arg1_align, arg2_align))));
8480
8481 /* Return the value in the proper mode for this function. */
8482 mode = TYPE_MODE (TREE_TYPE (exp));
8483 if (GET_MODE (result) == mode)
8484 return result;
8485 else if (target != 0)
8486 {
8487 convert_move (target, result, 0);
8488 return target;
8489 }
8490 else
8491 return convert_to_mode (mode, result, 0);
8492 }
8493 #else
8494 case BUILT_IN_STRCMP:
8495 case BUILT_IN_MEMCMP:
8496 break;
8497 #endif
8498
8499 /* __builtin_setjmp is passed a pointer to an array of five words
8500 (not all will be used on all machines). It operates similarly to
8501 the C library function of the same name, but is more efficient.
8502 Much of the code below (and for longjmp) is copied from the handling
8503 of non-local gotos.
8504
8505 NOTE: This is intended for use by GNAT and will only work in
8506 the method used by it. This code will likely NOT survive to
8507 the GCC 2.8.0 release. */
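/* Buffer layout assumed by the code below and read back by
   __builtin_longjmp (one word is GET_MODE_SIZE (Pmode) bytes):

       word 0:   frame pointer (virtual_stack_vars_rtx)
       word 1:   address of the receiver label (lab1)
       word 2+:  stack save area, in SAVE_NONLOCAL format  */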
8508 case BUILT_IN_SETJMP:
8509 if (arglist == 0
8510 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8511 break;
8512
8513 {
8514 rtx buf_addr
8515 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), subtarget,
8516 VOIDmode, 0));
8517 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8518 enum machine_mode sa_mode = Pmode;
8519 rtx stack_save;
8520
8521 if (target == 0 || GET_CODE (target) != REG
8522 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8523 target = gen_reg_rtx (value_mode);
8524
8525 emit_queue ();
8526
8527 emit_note (NULL_PTR, NOTE_INSN_SETJMP);
8528 current_function_calls_setjmp = 1;
8529
8530 /* We store the frame pointer and the address of lab1 in the buffer
8531 and use the rest of it for the stack save area, which is
8532 machine-dependent. */
8533 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8534 virtual_stack_vars_rtx);
8535 emit_move_insn
8536 (validize_mem (gen_rtx (MEM, Pmode,
8537 plus_constant (buf_addr,
8538 GET_MODE_SIZE (Pmode)))),
8539 gen_rtx (LABEL_REF, Pmode, lab1));
8540
8541 #ifdef HAVE_save_stack_nonlocal
8542 if (HAVE_save_stack_nonlocal)
8543 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8544 #endif
8545
8546 stack_save = gen_rtx (MEM, sa_mode,
8547 plus_constant (buf_addr,
8548 2 * GET_MODE_SIZE (Pmode)));
8549 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8550
8551 /* Set TARGET to zero and branch around the other case. */
8552 emit_move_insn (target, const0_rtx);
8553 emit_jump_insn (gen_jump (lab2));
8554 emit_barrier ();
8555 emit_label (lab1);
8556
8557 /* Now put in the code to restore the frame pointer, and argument
8558 pointer, if needed. The code below is from expand_end_bindings
8559 in stmt.c; see detailed documentation there. */
8560 #ifdef HAVE_nonlocal_goto
8561 if (! HAVE_nonlocal_goto)
8562 #endif
8563 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8564
8565 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8566 if (fixed_regs[ARG_POINTER_REGNUM])
8567 {
8568 #ifdef ELIMINABLE_REGS
8569 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8570 int i;
8571
8572 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8573 if (elim_regs[i].from == ARG_POINTER_REGNUM
8574 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8575 break;
8576
8577 if (i == sizeof elim_regs / sizeof elim_regs [0])
8578 #endif
8579 {
8580 /* Now restore our arg pointer from the address at which it
8581 was saved in our stack frame.
8582 If there hasn't been space allocated for it yet, make
8583 some now. */
8584 if (arg_pointer_save_area == 0)
8585 arg_pointer_save_area
8586 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8587 emit_move_insn (virtual_incoming_args_rtx,
8588 copy_to_reg (arg_pointer_save_area));
8589 }
8590 }
8591 #endif
8592
8593 /* The result to return is in the static chain pointer. */
8594 if (GET_MODE (static_chain_rtx) == GET_MODE (target))
8595 emit_move_insn (target, static_chain_rtx);
8596 else
8597 convert_move (target, static_chain_rtx, 0);
8598
8599 emit_label (lab2);
8600 return target;
8601 }
8602
8603 /* __builtin_longjmp is passed a pointer to an array of five words
8604 and a value to return. It's similar to the C library longjmp
8605 function but works with __builtin_setjmp above. */
8606 case BUILT_IN_LONGJMP:
8607 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8608 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8609 break;
8610
8611 {
8612 rtx buf_addr
8613 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), NULL_RTX,
8614 VOIDmode, 0));
8615 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8616 rtx lab = gen_rtx (MEM, Pmode,
8617 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8618 enum machine_mode sa_mode
8619 #ifdef HAVE_save_stack_nonlocal
8620 = (HAVE_save_stack_nonlocal
8621 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8622 : Pmode);
8623 #else
8624 = Pmode;
8625 #endif
8626 rtx stack = gen_rtx (MEM, sa_mode,
8627 plus_constant (buf_addr,
8628 2 * GET_MODE_SIZE (Pmode)));
8629 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), NULL_RTX,
8630 VOIDmode, 0);
8631
8632 /* Pick up FP, label, and SP from the block and jump. This code is
8633 from expand_goto in stmt.c; see there for detailed comments. */
8634 #ifdef HAVE_nonlocal_goto
8635 if (HAVE_nonlocal_goto)
8636 emit_insn (gen_nonlocal_goto (fp, lab, stack, value));
8637 else
8638 #endif
8639 {
8640 emit_move_insn (hard_frame_pointer_rtx, fp);
8641 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8642
8643 /* Put the return value in the static chain register. */
8644 emit_move_insn (static_chain_rtx, value);
8645 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8646 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8647 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8648 emit_indirect_jump (copy_to_reg (lab));
8649 }
8650
8651 return const0_rtx;
8652 }
8653
8654 default: /* just do library call, if unknown builtin */
8655 error ("built-in function `%s' not currently supported",
8656 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8657 }
8658
8659 /* The switch statement above can drop through to cause the function
8660 to be called normally. */
8661
8662 return expand_call (exp, target, ignore);
8663 }
8664 \f
8665 /* Built-in functions to perform an untyped call and return. */
8666
8667 /* For each register that may be used for calling a function, this
8668 gives a mode used to copy the register's value. VOIDmode indicates
8669 the register is not used for calling a function. If the machine
8670 has register windows, this gives only the outbound registers.
8671 INCOMING_REGNO gives the corresponding inbound register. */
8672 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8673
8674 /* For each register that may be used for returning values, this gives
8675 a mode used to copy the register's value. VOIDmode indicates the
8676 register is not used for returning values. If the machine has
8677 register windows, this gives only the outbound registers.
8678 INCOMING_REGNO gives the corresponding inbound register. */
8679 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8680
8681 /* For each register that may be used for calling a function, this
8682 gives the offset of that register into the block returned by
8683 __builtin_apply_args. 0 indicates that the register is not
8684 used for calling a function. */
8685 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8686
8687 /* Return the offset of register REGNO into the block returned by
8688 __builtin_apply_args. This is not declared static, since it is
8689 needed in objc-act.c. */
8690
8691 int
8692 apply_args_register_offset (regno)
8693 int regno;
8694 {
8695 apply_args_size ();
8696
8697 /* Arguments are always put in outgoing registers (in the argument
8698 block) when that makes sense. */
8699 #ifdef OUTGOING_REGNO
8700 regno = OUTGOING_REGNO(regno);
8701 #endif
8702 return apply_args_reg_offset[regno];
8703 }
8704
8705 /* Return the size required for the block returned by __builtin_apply_args,
8706 and initialize apply_args_mode. */
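/* For example, the alignment rounding below advances an offset of 6
   to 8 for a mode whose alignment is 8 bytes:
   CEIL (6, 8) * 8 == 1 * 8 == 8.  */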
8707
8708 static int
8709 apply_args_size ()
8710 {
8711 static int size = -1;
8712 int align, regno;
8713 enum machine_mode mode;
8714
8715 /* The values computed by this function never change. */
8716 if (size < 0)
8717 {
8718 /* The first value is the incoming arg-pointer. */
8719 size = GET_MODE_SIZE (Pmode);
8720
8721 /* The second value is the structure value address unless this is
8722 passed as an "invisible" first argument. */
8723 if (struct_value_rtx)
8724 size += GET_MODE_SIZE (Pmode);
8725
8726 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8727 if (FUNCTION_ARG_REGNO_P (regno))
8728 {
8729 /* Search for the proper mode for copying this register's
8730 value. I'm not sure this is right, but it works so far. */
8731 enum machine_mode best_mode = VOIDmode;
8732
8733 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8734 mode != VOIDmode;
8735 mode = GET_MODE_WIDER_MODE (mode))
8736 if (HARD_REGNO_MODE_OK (regno, mode)
8737 && HARD_REGNO_NREGS (regno, mode) == 1)
8738 best_mode = mode;
8739
8740 if (best_mode == VOIDmode)
8741 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8742 mode != VOIDmode;
8743 mode = GET_MODE_WIDER_MODE (mode))
8744 if (HARD_REGNO_MODE_OK (regno, mode)
8745 && (mov_optab->handlers[(int) mode].insn_code
8746 != CODE_FOR_nothing))
8747 best_mode = mode;
8748
8749 mode = best_mode;
8750 if (mode == VOIDmode)
8751 abort ();
8752
8753 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8754 if (size % align != 0)
8755 size = CEIL (size, align) * align;
8756 apply_args_reg_offset[regno] = size;
8757 size += GET_MODE_SIZE (mode);
8758 apply_args_mode[regno] = mode;
8759 }
8760 else
8761 {
8762 apply_args_mode[regno] = VOIDmode;
8763 apply_args_reg_offset[regno] = 0;
8764 }
8765 }
8766 return size;
8767 }
8768
8769 /* Return the size required for the block returned by __builtin_apply,
8770 and initialize apply_result_mode. */
8771
8772 static int
8773 apply_result_size ()
8774 {
8775 static int size = -1;
8776 int align, regno;
8777 enum machine_mode mode;
8778
8779 /* The values computed by this function never change. */
8780 if (size < 0)
8781 {
8782 size = 0;
8783
8784 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8785 if (FUNCTION_VALUE_REGNO_P (regno))
8786 {
8787 /* Search for the proper mode for copying this register's
8788 value. I'm not sure this is right, but it works so far. */
8789 enum machine_mode best_mode = VOIDmode;
8790
8791 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8792 mode != TImode;
8793 mode = GET_MODE_WIDER_MODE (mode))
8794 if (HARD_REGNO_MODE_OK (regno, mode))
8795 best_mode = mode;
8796
8797 if (best_mode == VOIDmode)
8798 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8799 mode != VOIDmode;
8800 mode = GET_MODE_WIDER_MODE (mode))
8801 if (HARD_REGNO_MODE_OK (regno, mode)
8802 && (mov_optab->handlers[(int) mode].insn_code
8803 != CODE_FOR_nothing))
8804 best_mode = mode;
8805
8806 mode = best_mode;
8807 if (mode == VOIDmode)
8808 abort ();
8809
8810 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8811 if (size % align != 0)
8812 size = CEIL (size, align) * align;
8813 size += GET_MODE_SIZE (mode);
8814 apply_result_mode[regno] = mode;
8815 }
8816 else
8817 apply_result_mode[regno] = VOIDmode;
8818
8819 /* Allow targets that use untyped_call and untyped_return to override
8820 the size so that machine-specific information can be stored here. */
8821 #ifdef APPLY_RESULT_SIZE
8822 size = APPLY_RESULT_SIZE;
8823 #endif
8824 }
8825 return size;
8826 }
8827
8828 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8829 /* Create a vector describing the result block RESULT. If SAVEP is true,
8830 the result block is used to save the values; otherwise it is used to
8831 restore the values. */
8832
8833 static rtx
8834 result_vector (savep, result)
8835 int savep;
8836 rtx result;
8837 {
8838 int regno, size, align, nelts;
8839 enum machine_mode mode;
8840 rtx reg, mem;
8841 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8842
8843 size = nelts = 0;
8844 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8845 if ((mode = apply_result_mode[regno]) != VOIDmode)
8846 {
8847 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8848 if (size % align != 0)
8849 size = CEIL (size, align) * align;
8850 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8851 mem = change_address (result, mode,
8852 plus_constant (XEXP (result, 0), size));
8853 savevec[nelts++] = (savep
8854 ? gen_rtx (SET, VOIDmode, mem, reg)
8855 : gen_rtx (SET, VOIDmode, reg, mem));
8856 size += GET_MODE_SIZE (mode);
8857 }
8858 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8859 }
8860 #endif /* HAVE_untyped_call or HAVE_untyped_return */
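/* Illustrative sketch with a hypothetical target whose only
   value-return register is register 0 in SImode: result_vector (1,
   result) would then build roughly

	(parallel [(set (mem:SI <addr of RESULT>) (reg:SI 0))])

   one SET per live entry of apply_result_mode, storing to memory when
   SAVEP is nonzero and loading from it otherwise.  */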
8861
8862 /* Save the state required to perform an untyped call with the same
8863 arguments as were passed to the current function. */
8864
8865 static rtx
8866 expand_builtin_apply_args ()
8867 {
8868 rtx registers;
8869 int size, align, regno;
8870 enum machine_mode mode;
8871
8872 /* Create a block where the arg-pointer, structure value address,
8873 and argument registers can be saved. */
8874 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8875
8876 /* Walk past the arg-pointer and structure value address. */
8877 size = GET_MODE_SIZE (Pmode);
8878 if (struct_value_rtx)
8879 size += GET_MODE_SIZE (Pmode);
8880
8881 /* Save each register used in calling a function to the block. */
8882 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8883 if ((mode = apply_args_mode[regno]) != VOIDmode)
8884 {
8885 rtx tem;
8886
8887 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8888 if (size % align != 0)
8889 size = CEIL (size, align) * align;
8890
8891 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8892
8893 #ifdef STACK_REGS
8894 /* For reg-stack.c's stack register housekeeping.
8895 Compare with a similar piece of code in function.c. */
8896
8897 emit_insn (gen_rtx (USE, mode, tem));
8898 #endif
8899
8900 emit_move_insn (change_address (registers, mode,
8901 plus_constant (XEXP (registers, 0),
8902 size)),
8903 tem);
8904 size += GET_MODE_SIZE (mode);
8905 }
8906
8907 /* Save the arg pointer to the block. */
8908 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8909 copy_to_reg (virtual_incoming_args_rtx));
8910 size = GET_MODE_SIZE (Pmode);
8911
8912 /* Save the structure value address unless this is passed as an
8913 "invisible" first argument. */
8914 if (struct_value_incoming_rtx)
8915 {
8916 emit_move_insn (change_address (registers, Pmode,
8917 plus_constant (XEXP (registers, 0),
8918 size)),
8919 copy_to_reg (struct_value_incoming_rtx));
8920 size += GET_MODE_SIZE (Pmode);
8921 }
8922
8923 /* Return the address of the block. */
8924 return copy_addr_to_reg (XEXP (registers, 0));
8925 }
8926
8927 /* Perform an untyped call and save the state required to perform an
8928 untyped return of whatever value was returned by the given function. */
8929
8930 static rtx
8931 expand_builtin_apply (function, arguments, argsize)
8932 rtx function, arguments, argsize;
8933 {
8934 int size, align, regno;
8935 enum machine_mode mode;
8936 rtx incoming_args, result, reg, dest, call_insn;
8937 rtx old_stack_level = 0;
8938 rtx call_fusage = 0;
8939
8940 /* Create a block where the return registers can be saved. */
8941 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8942
8943 /* ??? The argsize value should be adjusted here. */
8944
8945 /* Fetch the arg pointer from the ARGUMENTS block. */
8946 incoming_args = gen_reg_rtx (Pmode);
8947 emit_move_insn (incoming_args,
8948 gen_rtx (MEM, Pmode, arguments));
8949 #ifndef STACK_GROWS_DOWNWARD
8950 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8951 incoming_args, 0, OPTAB_LIB_WIDEN);
8952 #endif
8953
8954 /* Perform postincrements before actually calling the function. */
8955 emit_queue ();
8956
8957 /* Push a new argument block and copy the arguments. */
8958 do_pending_stack_adjust ();
8959 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8960
8961 /* Push a block of memory onto the stack to store the memory arguments.
8962 Save the address in a register, and copy the memory arguments. ??? I
8963 haven't figured out how the calling convention macros affect this,
8964 but it's likely that the source and/or destination addresses in
8965 the block copy will need updating in machine specific ways. */
8966 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8967 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8968 gen_rtx (MEM, BLKmode, incoming_args),
8969 argsize,
8970 PARM_BOUNDARY / BITS_PER_UNIT);
8971
8972 /* Refer to the argument block. */
8973 apply_args_size ();
8974 arguments = gen_rtx (MEM, BLKmode, arguments);
8975
8976 /* Walk past the arg-pointer and structure value address. */
8977 size = GET_MODE_SIZE (Pmode);
8978 if (struct_value_rtx)
8979 size += GET_MODE_SIZE (Pmode);
8980
8981 /* Restore each of the registers previously saved. Make USE insns
8982 for each of these registers for use in making the call. */
8983 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8984 if ((mode = apply_args_mode[regno]) != VOIDmode)
8985 {
8986 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8987 if (size % align != 0)
8988 size = CEIL (size, align) * align;
8989 reg = gen_rtx (REG, mode, regno);
8990 emit_move_insn (reg,
8991 change_address (arguments, mode,
8992 plus_constant (XEXP (arguments, 0),
8993 size)));
8994
8995 use_reg (&call_fusage, reg);
8996 size += GET_MODE_SIZE (mode);
8997 }
8998
8999 /* Restore the structure value address unless this is passed as an
9000 "invisible" first argument. */
9001 size = GET_MODE_SIZE (Pmode);
9002 if (struct_value_rtx)
9003 {
9004 rtx value = gen_reg_rtx (Pmode);
9005 emit_move_insn (value,
9006 change_address (arguments, Pmode,
9007 plus_constant (XEXP (arguments, 0),
9008 size)));
9009 emit_move_insn (struct_value_rtx, value);
9010 if (GET_CODE (struct_value_rtx) == REG)
9011 use_reg (&call_fusage, struct_value_rtx);
9012 size += GET_MODE_SIZE (Pmode);
9013 }
9014
9015 /* All arguments and registers used for the call are set up by now! */
9016 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9017
9018 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
9019 need be done; nor do we want to load it into a register as an optimization,
9020 because prepare_call_address already did that if it was worth doing. */
9021 if (GET_CODE (function) != SYMBOL_REF)
9022 function = memory_address (FUNCTION_MODE, function);
9023
9024 /* Generate the actual call instruction and save the return value. */
9025 #ifdef HAVE_untyped_call
9026 if (HAVE_untyped_call)
9027 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9028 result, result_vector (1, result)));
9029 else
9030 #endif
9031 #ifdef HAVE_call_value
9032 if (HAVE_call_value)
9033 {
9034 rtx valreg = 0;
9035
9036 /* Locate the unique return register. It is not possible to
9037 express a call that sets more than one return register using
9038 call_value; use untyped_call for that. In fact, untyped_call
9039 only needs to save the return registers in the given block. */
9040 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9041 if ((mode = apply_result_mode[regno]) != VOIDmode)
9042 {
9043 if (valreg)
9044 abort (); /* HAVE_untyped_call required. */
9045 valreg = gen_rtx (REG, mode, regno);
9046 }
9047
9048 emit_call_insn (gen_call_value (valreg,
9049 gen_rtx (MEM, FUNCTION_MODE, function),
9050 const0_rtx, NULL_RTX, const0_rtx));
9051
9052 emit_move_insn (change_address (result, GET_MODE (valreg),
9053 XEXP (result, 0)),
9054 valreg);
9055 }
9056 else
9057 #endif
9058 abort ();
9059
9060 /* Find the CALL insn we just emitted. */
9061 for (call_insn = get_last_insn ();
9062 call_insn && GET_CODE (call_insn) != CALL_INSN;
9063 call_insn = PREV_INSN (call_insn))
9064 ;
9065
9066 if (! call_insn)
9067 abort ();
9068
9069 /* Put the register usage information on the CALL. If there is already
9070 some usage information, put ours at the end. */
9071 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9072 {
9073 rtx link;
9074
9075 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9076 link = XEXP (link, 1))
9077 ;
9078
9079 XEXP (link, 1) = call_fusage;
9080 }
9081 else
9082 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9083
9084 /* Restore the stack. */
9085 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9086
9087 /* Return the address of the result block. */
9088 return copy_addr_to_reg (XEXP (result, 0));
9089 }
9090
9091 /* Perform an untyped return. */
9092
9093 static void
9094 expand_builtin_return (result)
9095 rtx result;
9096 {
9097 int size, align, regno;
9098 enum machine_mode mode;
9099 rtx reg;
9100 rtx call_fusage = 0;
9101
9102 apply_result_size ();
9103 result = gen_rtx (MEM, BLKmode, result);
9104
9105 #ifdef HAVE_untyped_return
9106 if (HAVE_untyped_return)
9107 {
9108 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9109 emit_barrier ();
9110 return;
9111 }
9112 #endif
9113
9114 /* Restore the return value and note that each value is used. */
9115 size = 0;
9116 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9117 if ((mode = apply_result_mode[regno]) != VOIDmode)
9118 {
9119 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9120 if (size % align != 0)
9121 size = CEIL (size, align) * align;
9122 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9123 emit_move_insn (reg,
9124 change_address (result, mode,
9125 plus_constant (XEXP (result, 0),
9126 size)));
9127
9128 push_to_sequence (call_fusage);
9129 emit_insn (gen_rtx (USE, VOIDmode, reg));
9130 call_fusage = get_insns ();
9131 end_sequence ();
9132 size += GET_MODE_SIZE (mode);
9133 }
9134
9135 /* Put the USE insns before the return. */
9136 emit_insns (call_fusage);
9137
9138 /* Return whatever values were restored by jumping directly to the end
9139 of the function. */
9140 expand_null_return ();
9141 }
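/* A user-level sketch of the builtins implemented above; the function
   name TARGET_FN and the 64-byte argument-size bound are hypothetical:

	void wrapper ()
	{
	  void *args = __builtin_apply_args ();
	  void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
	  __builtin_return (result);
	}

   expand_builtin_apply_args saves the incoming argument state,
   expand_builtin_apply replays it for TARGET_FN and saves the return
   registers, and expand_builtin_return reloads them and returns.  */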
9142 \f
9143 /* Expand code for a post- or pre- increment or decrement
9144 and return the RTX for the result.
9145 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
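/* A sketch, with X an int held in a register: for `y = x++;' the old
   value of X is the result, so when the increment cannot simply be
   queued a copy is saved first (temp = copy_to_reg (op0)) and TEMP is
   returned after the add; for `y = ++x;' the incremented value itself
   is returned.  */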
9146
9147 static rtx
9148 expand_increment (exp, post)
9149 register tree exp;
9150 int post;
9151 {
9152 register rtx op0, op1;
9153 register rtx temp, value;
9154 register tree incremented = TREE_OPERAND (exp, 0);
9155 optab this_optab = add_optab;
9156 int icode;
9157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9158 int op0_is_copy = 0;
9159 int single_insn = 0;
9160 /* 1 means we can't store into OP0 directly,
9161 because it is a subreg narrower than a word,
9162 and we don't dare clobber the rest of the word. */
9163 int bad_subreg = 0;
9164
9165 if (output_bytecode)
9166 {
9167 bc_expand_expr (exp);
9168 return NULL_RTX;
9169 }
9170
9171 /* Stabilize any component ref that might need to be
9172 evaluated more than once below. */
9173 if (!post
9174 || TREE_CODE (incremented) == BIT_FIELD_REF
9175 || (TREE_CODE (incremented) == COMPONENT_REF
9176 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9177 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9178 incremented = stabilize_reference (incremented);
9179 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9180 ones into save exprs so that they don't accidentally get evaluated
9181 more than once by the code below. */
9182 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9183 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9184 incremented = save_expr (incremented);
9185
9186 /* Compute the operands as RTX.
9187 Note whether OP0 is the actual lvalue or a copy of it:
9188 I believe it is a copy iff it is a register or subreg
9189 and insns were generated in computing it. */
9190
9191 temp = get_last_insn ();
9192 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9193
9194 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9195 in place but instead must do sign- or zero-extension during assignment,
9196 so we copy it into a new register and let the code below use it as
9197 a copy.
9198
9199 Note that we can safely modify this SUBREG since it is known not to be
9200 shared (it was made by the expand_expr call above). */
9201
9202 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9203 {
9204 if (post)
9205 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9206 else
9207 bad_subreg = 1;
9208 }
9209 else if (GET_CODE (op0) == SUBREG
9210 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9211 {
9212 /* We cannot increment this SUBREG in place. If we are
9213 post-incrementing, get a copy of the old value. Otherwise,
9214 just mark that we cannot increment in place. */
9215 if (post)
9216 op0 = copy_to_reg (op0);
9217 else
9218 bad_subreg = 1;
9219 }
9220
9221 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9222 && temp != get_last_insn ());
9223 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9224
9225 /* Decide whether incrementing or decrementing. */
9226 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9227 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9228 this_optab = sub_optab;
9229
9230 /* Convert decrement by a constant into a negative increment. */
9231 if (this_optab == sub_optab
9232 && GET_CODE (op1) == CONST_INT)
9233 {
9234 op1 = GEN_INT (- INTVAL (op1));
9235 this_optab = add_optab;
9236 }
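  /* E.g. `x -= 5' now proceeds exactly as `x += -5' would, so only
     add_optab need be considered from here on.  */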
9237
9238 /* For a preincrement, see if we can do this with a single instruction. */
9239 if (!post)
9240 {
9241 icode = (int) this_optab->handlers[(int) mode].insn_code;
9242 if (icode != (int) CODE_FOR_nothing
9243 /* Make sure that OP0 is valid for operands 0 and 1
9244 of the insn we want to queue. */
9245 && (*insn_operand_predicate[icode][0]) (op0, mode)
9246 && (*insn_operand_predicate[icode][1]) (op0, mode)
9247 && (*insn_operand_predicate[icode][2]) (op1, mode))
9248 single_insn = 1;
9249 }
9250
9251 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9252 then we cannot just increment OP0. We must therefore contrive to
9253 increment the original value. Then, for postincrement, we can return
9254 OP0 since it is a copy of the old value. For preincrement, expand here
9255 unless we can do it with a single insn.
9256
9257 Likewise if storing directly into OP0 would clobber high bits
9258 we need to preserve (bad_subreg). */
9259 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9260 {
9261 /* This is the easiest way to increment the value wherever it is.
9262 Problems with multiple evaluation of INCREMENTED are prevented
9263 because either (1) it is a component_ref or preincrement,
9264 in which case it was stabilized above, or (2) it is an array_ref
9265 with constant index in an array in a register, which is
9266 safe to reevaluate. */
9267 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9268 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9269 ? MINUS_EXPR : PLUS_EXPR),
9270 TREE_TYPE (exp),
9271 incremented,
9272 TREE_OPERAND (exp, 1));
9273
9274 while (TREE_CODE (incremented) == NOP_EXPR
9275 || TREE_CODE (incremented) == CONVERT_EXPR)
9276 {
9277 newexp = convert (TREE_TYPE (incremented), newexp);
9278 incremented = TREE_OPERAND (incremented, 0);
9279 }
9280
9281 temp = expand_assignment (incremented, newexp, ! post, 0);
9282 return post ? op0 : temp;
9283 }
9284
9285 if (post)
9286 {
9287 /* We have a true reference to the value in OP0.
9288 If there is an insn to add or subtract in this mode, queue it.
9289 Queueing the increment insn avoids the register shuffling
9290 that often results if we must increment now and first save
9291 the old value for subsequent use. */
9292
9293 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9294 op0 = stabilize (op0);
9295 #endif
9296
9297 icode = (int) this_optab->handlers[(int) mode].insn_code;
9298 if (icode != (int) CODE_FOR_nothing
9299 /* Make sure that OP0 is valid for operands 0 and 1
9300 of the insn we want to queue. */
9301 && (*insn_operand_predicate[icode][0]) (op0, mode)
9302 && (*insn_operand_predicate[icode][1]) (op0, mode))
9303 {
9304 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9305 op1 = force_reg (mode, op1);
9306
9307 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9308 }
9309 }
9310
9311 /* Preincrement, or we can't increment with one simple insn. */
9312 if (post)
9313 /* Save a copy of the value before inc or dec, to return it later. */
9314 temp = value = copy_to_reg (op0);
9315 else
9316 /* Arrange to return the incremented value. */
9317 /* Copy the rtx because expand_binop will protect it from the queue,
9318 and the results of that would be invalid for us to return
9319 if our caller does emit_queue before using our result. */
9320 temp = copy_rtx (value = op0);
9321
9322 /* Increment however we can. */
9323 op1 = expand_binop (mode, this_optab, value, op1, op0,
9324 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9325 /* Make sure the value is stored into OP0. */
9326 if (op1 != op0)
9327 emit_move_insn (op0, op1);
9328
9329 return temp;
9330 }
9331 \f
9332 /* Expand all function calls contained within EXP, innermost ones first.
9333 But don't look within expressions that have sequence points.
9334 For each CALL_EXPR, record the rtx for its value
9335 in the CALL_EXPR_RTL field. */
9336
9337 static void
9338 preexpand_calls (exp)
9339 tree exp;
9340 {
9341 register int nops, i;
9342 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9343
9344 if (! do_preexpand_calls)
9345 return;
9346
9347 /* Only expressions and references can contain calls. */
9348
9349 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9350 return;
9351
9352 switch (TREE_CODE (exp))
9353 {
9354 case CALL_EXPR:
9355 /* Do nothing if already expanded. */
9356 if (CALL_EXPR_RTL (exp) != 0)
9357 return;
9358
9359 /* Do nothing to built-in functions. */
9360 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
9361 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
9362 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9363 /* Do nothing if the call returns a variable-sized object. */
9364 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9365 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9366 return;
9367
9368 case COMPOUND_EXPR:
9369 case COND_EXPR:
9370 case TRUTH_ANDIF_EXPR:
9371 case TRUTH_ORIF_EXPR:
9372 /* If we find one of these, then we can be sure
9373 the adjust will be done for it (since it makes jumps).
9374 Do it now, so that if this is inside an argument
9375 of a function, we don't get the stack adjustment
9376 after some other args have already been pushed. */
9377 do_pending_stack_adjust ();
9378 return;
9379
9380 case BLOCK:
9381 case RTL_EXPR:
9382 case WITH_CLEANUP_EXPR:
9383 case CLEANUP_POINT_EXPR:
9384 return;
9385
9386 case SAVE_EXPR:
9387 if (SAVE_EXPR_RTL (exp) != 0)
9388 return;
9389 }
9390
9391 nops = tree_code_length[(int) TREE_CODE (exp)];
9392 for (i = 0; i < nops; i++)
9393 if (TREE_OPERAND (exp, i) != 0)
9394 {
9395 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9396 if (type == 'e' || type == '<' || type == '1' || type == '2'
9397 || type == 'r')
9398 preexpand_calls (TREE_OPERAND (exp, i));
9399 }
9400 }
9401 \f
9402 /* At the start of a function, record that we have no previously-pushed
9403 arguments waiting to be popped. */
9404
9405 void
9406 init_pending_stack_adjust ()
9407 {
9408 pending_stack_adjust = 0;
9409 }
9410
9411 /* When exiting from function, if safe, clear out any pending stack adjust
9412 so the adjustment won't get done. */
9413
9414 void
9415 clear_pending_stack_adjust ()
9416 {
9417 #ifdef EXIT_IGNORE_STACK
9418 if (optimize > 0
9419 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9420 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9421 && ! flag_inline_functions)
9422 pending_stack_adjust = 0;
9423 #endif
9424 }
9425
9426 /* Pop any previously-pushed arguments that have not been popped yet. */
9427
9428 void
9429 do_pending_stack_adjust ()
9430 {
9431 if (inhibit_defer_pop == 0)
9432 {
9433 if (pending_stack_adjust != 0)
9434 adjust_stack (GEN_INT (pending_stack_adjust));
9435 pending_stack_adjust = 0;
9436 }
9437 }
9438
9439 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9440 Returns the cleanups to be performed. */
9441
9442 static tree
9443 defer_cleanups_to (old_cleanups)
9444 tree old_cleanups;
9445 {
9446 tree new_cleanups = NULL_TREE;
9447 tree cleanups = cleanups_this_call;
9448 tree last = NULL_TREE;
9449
9450 while (cleanups_this_call != old_cleanups)
9451 {
9452 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9453 last = cleanups_this_call;
9454 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9455 }
9456
9457 if (last)
9458 {
9459 /* Remove the list from the chain of cleanups. */
9460 TREE_CHAIN (last) = NULL_TREE;
9461
9462 /* Reverse them so that we can build them in the right order. */
9463 cleanups = nreverse (cleanups);
9464
9465 /* All cleanups must be on the function_obstack. */
9466 push_obstacks_nochange ();
9467 resume_temporary_allocation ();
9468
9469 while (cleanups)
9470 {
9471 if (new_cleanups)
9472 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9473 TREE_VALUE (cleanups), new_cleanups);
9474 else
9475 new_cleanups = TREE_VALUE (cleanups);
9476
9477 cleanups = TREE_CHAIN (cleanups);
9478 }
9479
9480 pop_obstacks ();
9481 }
9482
9483 return new_cleanups;
9484 }
9485
9486 /* Expand all cleanups up to OLD_CLEANUPS.
9487 Needed here, and also for language-dependent calls. */
9488
9489 void
9490 expand_cleanups_to (old_cleanups)
9491 tree old_cleanups;
9492 {
9493 while (cleanups_this_call != old_cleanups)
9494 {
9495 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9496 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9497 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9498 }
9499 }
9500 \f
9501 /* Expand conditional expressions. */
9502
9503 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9504 LABEL is an rtx of code CODE_LABEL, in this function and all the
9505 functions here. */
9506
9507 void
9508 jumpifnot (exp, label)
9509 tree exp;
9510 rtx label;
9511 {
9512 do_jump (exp, label, NULL_RTX);
9513 }
9514
9515 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9516
9517 void
9518 jumpif (exp, label)
9519 tree exp;
9520 rtx label;
9521 {
9522 do_jump (exp, NULL_RTX, label);
9523 }
9524
9525 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9526 the result is zero, or IF_TRUE_LABEL if the result is one.
9527 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9528 meaning fall through in that case.
9529
9530 do_jump always does any pending stack adjust except when it does not
9531 actually perform a jump. An example where there is no jump
9532 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9533
9534 This function is responsible for optimizing cases such as
9535 &&, || and comparison operators in EXP. */
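/* An illustrative sketch; the label names are hypothetical. For
   `if (a && b) f ();' the do_jump calls nest as

	do_jump (a && b, L_false, 0)
	  -> do_jump (a, L_false, 0)	jump to L_false when A is zero
	  -> do_jump (b, L_false, 0)	likewise for B
	... code for f () ...
      L_false:

   so short-circuit evaluation falls out of the label wiring without a
   boolean value ever being materialized.  */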
9536
9537 void
9538 do_jump (exp, if_false_label, if_true_label)
9539 tree exp;
9540 rtx if_false_label, if_true_label;
9541 {
9542 register enum tree_code code = TREE_CODE (exp);
9543 /* Some cases need to create a label to jump to
9544 in order to properly fall through.
9545 These cases set DROP_THROUGH_LABEL nonzero. */
9546 rtx drop_through_label = 0;
9547 rtx temp;
9548 rtx comparison = 0;
9549 int i;
9550 tree type;
9551 enum machine_mode mode;
9552
9553 emit_queue ();
9554
9555 switch (code)
9556 {
9557 case ERROR_MARK:
9558 break;
9559
9560 case INTEGER_CST:
9561 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9562 if (temp)
9563 emit_jump (temp);
9564 break;
9565
9566 #if 0
9567 /* This is not true with #pragma weak */
9568 case ADDR_EXPR:
9569 /* The address of something can never be zero. */
9570 if (if_true_label)
9571 emit_jump (if_true_label);
9572 break;
9573 #endif
9574
9575 case NOP_EXPR:
9576 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9577 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9578 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9579 goto normal;
9580 case CONVERT_EXPR:
9581 /* If we are narrowing the operand, we have to do the compare in the
9582 narrower mode. */
9583 if ((TYPE_PRECISION (TREE_TYPE (exp))
9584 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9585 goto normal;
9586 case NON_LVALUE_EXPR:
9587 case REFERENCE_EXPR:
9588 case ABS_EXPR:
9589 case NEGATE_EXPR:
9590 case LROTATE_EXPR:
9591 case RROTATE_EXPR:
9592 /* These cannot change zero->non-zero or vice versa. */
9593 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9594 break;
9595
9596 #if 0
9597 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9598 a test and can be longer if the test is eliminated. */
9599 case PLUS_EXPR:
9600 /* Reduce to minus. */
9601 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9602 TREE_OPERAND (exp, 0),
9603 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9604 TREE_OPERAND (exp, 1))));
9605 /* Process as MINUS. */
9606 #endif
9607
9608 case MINUS_EXPR:
9609 /* Non-zero iff operands of minus differ. */
9610 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9611 TREE_OPERAND (exp, 0),
9612 TREE_OPERAND (exp, 1)),
9613 NE, NE);
9614 break;
9615
9616 case BIT_AND_EXPR:
9617 /* If we are AND'ing with a small constant, do this comparison in the
9618 smallest type that fits. If the machine doesn't have comparisons
9619 that small, it will be converted back to the wider comparison.
9620 This helps if we are testing the sign bit of a narrower object.
9621 combine can't do this for us because it can't know whether a
9622 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
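      /* For example, a sketch: testing `x & 0x40' with X a full int
	 needs only bit 6, so i = floor_log2 (0x40) = 6 fits in 8-bit
	 QImode and the jump can be done as a QImode comparison when the
	 target has a QImode compare insn.  */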
9623
9624 if (! SLOW_BYTE_ACCESS
9625 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9626 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9627 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9628 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9629 && (type = type_for_mode (mode, 1)) != 0
9630 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9631 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9632 != CODE_FOR_nothing))
9633 {
9634 do_jump (convert (type, exp), if_false_label, if_true_label);
9635 break;
9636 }
9637 goto normal;
9638
9639 case TRUTH_NOT_EXPR:
9640 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9641 break;
9642
9643 case TRUTH_ANDIF_EXPR:
9644 {
9645 rtx seq1, seq2;
9646 tree cleanups, old_cleanups;
9647
9648 if (if_false_label == 0)
9649 if_false_label = drop_through_label = gen_label_rtx ();
9650 start_sequence ();
9651 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9652 seq1 = get_insns ();
9653 end_sequence ();
9654
9655 old_cleanups = cleanups_this_call;
9656 start_sequence ();
9657 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9658 seq2 = get_insns ();
9659 end_sequence ();
9660
9661 cleanups = defer_cleanups_to (old_cleanups);
9662 if (cleanups)
9663 {
9664 rtx flag = gen_reg_rtx (word_mode);
9665 tree new_cleanups;
9666 tree cond;
9667
9668 /* Flag cleanups as not needed. */
9669 emit_move_insn (flag, const0_rtx);
9670 emit_insns (seq1);
9671
9672 /* Flag cleanups as needed. */
9673 emit_move_insn (flag, const1_rtx);
9674 emit_insns (seq2);
9675
9676 /* All cleanups must be on the function_obstack. */
9677 push_obstacks_nochange ();
9678 resume_temporary_allocation ();
9679
9680 /* Convert flag, which is an rtx, into a tree. */
9681 cond = make_node (RTL_EXPR);
9682 TREE_TYPE (cond) = integer_type_node;
9683 RTL_EXPR_RTL (cond) = flag;
9684 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9685 cond = save_expr (cond);
9686
9687 new_cleanups = build (COND_EXPR, void_type_node,
9688 truthvalue_conversion (cond),
9689 cleanups, integer_zero_node);
9690 new_cleanups = fold (new_cleanups);
9691
9692 pop_obstacks ();
9693
9694 /* Now add in the conditionalized cleanups. */
9695 cleanups_this_call
9696 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9697 (*interim_eh_hook) (NULL_TREE);
9698 }
9699 else
9700 {
9701 emit_insns (seq1);
9702 emit_insns (seq2);
9703 }
9704 }
9705 break;
9706
9707 case TRUTH_ORIF_EXPR:
9708 {
9709 rtx seq1, seq2;
9710 tree cleanups, old_cleanups;
9711
9712 if (if_true_label == 0)
9713 if_true_label = drop_through_label = gen_label_rtx ();
9714 start_sequence ();
9715 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9716 seq1 = get_insns ();
9717 end_sequence ();
9718
9719 old_cleanups = cleanups_this_call;
9720 start_sequence ();
9721 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9722 seq2 = get_insns ();
9723 end_sequence ();
9724
9725 cleanups = defer_cleanups_to (old_cleanups);
9726 if (cleanups)
9727 {
9728 rtx flag = gen_reg_rtx (word_mode);
9729 tree new_cleanups;
9730 tree cond;
9731
9732 /* Flag cleanups as not needed. */
9733 emit_move_insn (flag, const0_rtx);
9734 emit_insns (seq1);
9735
9736 /* Flag cleanups as needed. */
9737 emit_move_insn (flag, const1_rtx);
9738 emit_insns (seq2);
9739
9740 /* All cleanups must be on the function_obstack. */
9741 push_obstacks_nochange ();
9742 resume_temporary_allocation ();
9743
9744 /* Convert flag, which is an rtx, into a tree. */
9745 cond = make_node (RTL_EXPR);
9746 TREE_TYPE (cond) = integer_type_node;
9747 RTL_EXPR_RTL (cond) = flag;
9748 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9749 cond = save_expr (cond);
9750
9751 new_cleanups = build (COND_EXPR, void_type_node,
9752 truthvalue_conversion (cond),
9753 cleanups, integer_zero_node);
9754 new_cleanups = fold (new_cleanups);
9755
9756 pop_obstacks ();
9757
9758 /* Now add in the conditionalized cleanups. */
9759 cleanups_this_call
9760 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9761 (*interim_eh_hook) (NULL_TREE);
9762 }
9763 else
9764 {
9765 emit_insns (seq1);
9766 emit_insns (seq2);
9767 }
9768 }
9769 break;
9770
9771 case COMPOUND_EXPR:
9772 push_temp_slots ();
9773 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9774 free_temp_slots ();
9775 pop_temp_slots ();
9776 emit_queue ();
9777 do_pending_stack_adjust ();
9778 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9779 break;
9780
9781 case COMPONENT_REF:
9782 case BIT_FIELD_REF:
9783 case ARRAY_REF:
9784 {
9785 int bitsize, bitpos, unsignedp;
9786 enum machine_mode mode;
9787 tree type;
9788 tree offset;
9789 int volatilep = 0;
9790
9791 /* Get description of this reference. We don't actually care
9792 about the underlying object here. */
9793 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9794 &mode, &unsignedp, &volatilep);
9795
9796 type = type_for_size (bitsize, unsignedp);
9797 if (! SLOW_BYTE_ACCESS
9798 && type != 0 && bitsize >= 0
9799 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9800 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9801 != CODE_FOR_nothing))
9802 {
9803 do_jump (convert (type, exp), if_false_label, if_true_label);
9804 break;
9805 }
9806 goto normal;
9807 }
9808
9809 case COND_EXPR:
9810 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9811 if (integer_onep (TREE_OPERAND (exp, 1))
9812 && integer_zerop (TREE_OPERAND (exp, 2)))
9813 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9814
9815 else if (integer_zerop (TREE_OPERAND (exp, 1))
9816 && integer_onep (TREE_OPERAND (exp, 2)))
9817 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9818
9819 else
9820 {
9821 register rtx label1 = gen_label_rtx ();
9822 drop_through_label = gen_label_rtx ();
9823 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9824 /* Now the THEN-expression. */
9825 do_jump (TREE_OPERAND (exp, 1),
9826 if_false_label ? if_false_label : drop_through_label,
9827 if_true_label ? if_true_label : drop_through_label);
9828 /* In case the do_jump just above never jumps. */
9829 do_pending_stack_adjust ();
9830 emit_label (label1);
9831 /* Now the ELSE-expression. */
9832 do_jump (TREE_OPERAND (exp, 2),
9833 if_false_label ? if_false_label : drop_through_label,
9834 if_true_label ? if_true_label : drop_through_label);
9835 }
9836 break;
9837
9838 case EQ_EXPR:
9839 {
9840 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9841
9842 if (integer_zerop (TREE_OPERAND (exp, 1)))
9843 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9844 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9845 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9846 do_jump
9847 (fold
9848 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9849 fold (build (EQ_EXPR, TREE_TYPE (exp),
9850 fold (build1 (REALPART_EXPR,
9851 TREE_TYPE (inner_type),
9852 TREE_OPERAND (exp, 0))),
9853 fold (build1 (REALPART_EXPR,
9854 TREE_TYPE (inner_type),
9855 TREE_OPERAND (exp, 1))))),
9856 fold (build (EQ_EXPR, TREE_TYPE (exp),
9857 fold (build1 (IMAGPART_EXPR,
9858 TREE_TYPE (inner_type),
9859 TREE_OPERAND (exp, 0))),
9860 fold (build1 (IMAGPART_EXPR,
9861 TREE_TYPE (inner_type),
9862 TREE_OPERAND (exp, 1))))))),
9863 if_false_label, if_true_label);
9864 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9865 && !can_compare_p (TYPE_MODE (inner_type)))
9866 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9867 else
9868 comparison = compare (exp, EQ, EQ);
9869 break;
9870 }
9871
9872 case NE_EXPR:
9873 {
9874 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9875
9876 if (integer_zerop (TREE_OPERAND (exp, 1)))
9877 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9878 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9879 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9880 do_jump
9881 (fold
9882 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9883 fold (build (NE_EXPR, TREE_TYPE (exp),
9884 fold (build1 (REALPART_EXPR,
9885 TREE_TYPE (inner_type),
9886 TREE_OPERAND (exp, 0))),
9887 fold (build1 (REALPART_EXPR,
9888 TREE_TYPE (inner_type),
9889 TREE_OPERAND (exp, 1))))),
9890 fold (build (NE_EXPR, TREE_TYPE (exp),
9891 fold (build1 (IMAGPART_EXPR,
9892 TREE_TYPE (inner_type),
9893 TREE_OPERAND (exp, 0))),
9894 fold (build1 (IMAGPART_EXPR,
9895 TREE_TYPE (inner_type),
9896 TREE_OPERAND (exp, 1))))))),
9897 if_false_label, if_true_label);
9898 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9899 && !can_compare_p (TYPE_MODE (inner_type)))
9900 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9901 else
9902 comparison = compare (exp, NE, NE);
9903 break;
9904 }
9905
9906 case LT_EXPR:
9907 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9908 == MODE_INT)
9909 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9910 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9911 else
9912 comparison = compare (exp, LT, LTU);
9913 break;
9914
9915 case LE_EXPR:
9916 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9917 == MODE_INT)
9918 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9919 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9920 else
9921 comparison = compare (exp, LE, LEU);
9922 break;
9923
9924 case GT_EXPR:
9925 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9926 == MODE_INT)
9927 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9928 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9929 else
9930 comparison = compare (exp, GT, GTU);
9931 break;
9932
9933 case GE_EXPR:
9934 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9935 == MODE_INT)
9936 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9937 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9938 else
9939 comparison = compare (exp, GE, GEU);
9940 break;
9941
9942 default:
9943 normal:
9944 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9945 #if 0
9946 /* This is no longer needed and causes poor code, since it makes
9947 comparisons and tests from non-SI objects have different code
9948 sequences. */
9949 /* Copy to register to avoid generating bad insns by cse
9950 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9951 if (!cse_not_expected && GET_CODE (temp) == MEM)
9952 temp = copy_to_reg (temp);
9953 #endif
9954 do_pending_stack_adjust ();
9955 if (GET_CODE (temp) == CONST_INT)
9956 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9957 else if (GET_CODE (temp) == LABEL_REF)
9958 comparison = const_true_rtx;
9959 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9960 && !can_compare_p (GET_MODE (temp)))
9961 /* Note swapping the labels gives us not-equal. */
9962 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9963 else if (GET_MODE (temp) != VOIDmode)
9964 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9965 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9966 GET_MODE (temp), NULL_RTX, 0);
9967 else
9968 abort ();
9969 }
9970
9971 /* Do any postincrements in the expression that was tested. */
9972 emit_queue ();
9973
9974 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9975 straight into a conditional jump instruction as the jump condition.
9976 Otherwise, all the work has been done already. */
9977
9978 if (comparison == const_true_rtx)
9979 {
9980 if (if_true_label)
9981 emit_jump (if_true_label);
9982 }
9983 else if (comparison == const0_rtx)
9984 {
9985 if (if_false_label)
9986 emit_jump (if_false_label);
9987 }
9988 else if (comparison)
9989 do_jump_for_compare (comparison, if_false_label, if_true_label);
9990
9991 if (drop_through_label)
9992 {
9993 /* If do_jump produces code that might be jumped around,
9994 do any stack adjusts from that code, before the place
9995 where control merges in. */
9996 do_pending_stack_adjust ();
9997 emit_label (drop_through_label);
9998 }
9999 }
10000 \f
10001 /* Given a comparison expression EXP for values too wide to be compared
10002 with one insn, test the comparison and jump to the appropriate label.
10003 The code of EXP is ignored; we always test GT if SWAP is 0,
10004 and LT if SWAP is 1. */
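/* For example, a sketch: a signed DImode `a > b' on a 32-bit target
   becomes two word comparisons, high-order word first:

	if (a.high > b.high)  goto if_true_label;   signed GT
	if (a.high != b.high) goto if_false_label;
	if (a.low > b.low)    goto if_true_label;   unsigned GTU
	goto if_false_label;

   only the high-order word is compared signed; lower words are reached
   only on equality and are compared with GTU.  */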
10005
10006 static void
10007 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10008 tree exp;
10009 int swap;
10010 rtx if_false_label, if_true_label;
10011 {
10012 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10013 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10014 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10015 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10016 rtx drop_through_label = 0;
10017 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10018 int i;
10019
10020 if (! if_true_label || ! if_false_label)
10021 drop_through_label = gen_label_rtx ();
10022 if (! if_true_label)
10023 if_true_label = drop_through_label;
10024 if (! if_false_label)
10025 if_false_label = drop_through_label;
10026
10027 /* Compare a word at a time, high order first. */
10028 for (i = 0; i < nwords; i++)
10029 {
10030 rtx comp;
10031 rtx op0_word, op1_word;
10032
10033 if (WORDS_BIG_ENDIAN)
10034 {
10035 op0_word = operand_subword_force (op0, i, mode);
10036 op1_word = operand_subword_force (op1, i, mode);
10037 }
10038 else
10039 {
10040 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10041 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10042 }
10043
10044 /* All but high-order word must be compared as unsigned. */
10045 comp = compare_from_rtx (op0_word, op1_word,
10046 (unsignedp || i > 0) ? GTU : GT,
10047 unsignedp, word_mode, NULL_RTX, 0);
10048 if (comp == const_true_rtx)
10049 emit_jump (if_true_label);
10050 else if (comp != const0_rtx)
10051 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10052
10053 /* Consider lower words only if these are equal. */
10054 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10055 NULL_RTX, 0);
10056 if (comp == const_true_rtx)
10057 emit_jump (if_false_label);
10058 else if (comp != const0_rtx)
10059 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10060 }
10061
10062 if (if_false_label)
10063 emit_jump (if_false_label);
10064 if (drop_through_label)
10065 emit_label (drop_through_label);
10066 }
10067
10068 /* Compare OP0 with OP1, word at a time, in mode MODE.
10069 UNSIGNEDP says to do unsigned comparison.
10070 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10071
10072 void
10073 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10074 enum machine_mode mode;
10075 int unsignedp;
10076 rtx op0, op1;
10077 rtx if_false_label, if_true_label;
10078 {
10079 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10080 rtx drop_through_label = 0;
10081 int i;
10082
10083 if (! if_true_label || ! if_false_label)
10084 drop_through_label = gen_label_rtx ();
10085 if (! if_true_label)
10086 if_true_label = drop_through_label;
10087 if (! if_false_label)
10088 if_false_label = drop_through_label;
10089
10090 /* Compare a word at a time, high order first. */
10091 for (i = 0; i < nwords; i++)
10092 {
10093 rtx comp;
10094 rtx op0_word, op1_word;
10095
10096 if (WORDS_BIG_ENDIAN)
10097 {
10098 op0_word = operand_subword_force (op0, i, mode);
10099 op1_word = operand_subword_force (op1, i, mode);
10100 }
10101 else
10102 {
10103 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10104 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10105 }
10106
10107 /* All but high-order word must be compared as unsigned. */
10108 comp = compare_from_rtx (op0_word, op1_word,
10109 (unsignedp || i > 0) ? GTU : GT,
10110 unsignedp, word_mode, NULL_RTX, 0);
10111 if (comp == const_true_rtx)
10112 emit_jump (if_true_label);
10113 else if (comp != const0_rtx)
10114 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10115
10116 /* Consider lower words only if these are equal. */
10117 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10118 NULL_RTX, 0);
10119 if (comp == const_true_rtx)
10120 emit_jump (if_false_label);
10121 else if (comp != const0_rtx)
10122 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10123 }
10124
10125 if (if_false_label)
10126 emit_jump (if_false_label);
10127 if (drop_through_label)
10128 emit_label (drop_through_label);
10129 }
10130
10131 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10132 with one insn, test the comparison and jump to the appropriate label. */
10133
10134 static void
10135 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10136 tree exp;
10137 rtx if_false_label, if_true_label;
10138 {
10139 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10140 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10141 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10142 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10143 int i;
10144 rtx drop_through_label = 0;
10145
10146 if (! if_false_label)
10147 drop_through_label = if_false_label = gen_label_rtx ();
10148
10149 for (i = 0; i < nwords; i++)
10150 {
10151 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10152 operand_subword_force (op1, i, mode),
10153 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10154 word_mode, NULL_RTX, 0);
10155 if (comp == const_true_rtx)
10156 emit_jump (if_false_label);
10157 else if (comp != const0_rtx)
10158 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10159 }
10160
10161 if (if_true_label)
10162 emit_jump (if_true_label);
10163 if (drop_through_label)
10164 emit_label (drop_through_label);
10165 }
10166 \f
10167 /* Jump according to whether OP0 is 0.
10168 We assume that OP0 has an integer mode that is too wide
10169 for the available compare insns. */
10170
10171 static void
10172 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10173 rtx op0;
10174 rtx if_false_label, if_true_label;
10175 {
10176 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10177 int i;
10178 rtx drop_through_label = 0;
10179
10180 if (! if_false_label)
10181 drop_through_label = if_false_label = gen_label_rtx ();
10182
10183 for (i = 0; i < nwords; i++)
10184 {
10185 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10186 GET_MODE (op0)),
10187 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10188 if (comp == const_true_rtx)
10189 emit_jump (if_false_label);
10190 else if (comp != const0_rtx)
10191 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10192 }
10193
10194 if (if_true_label)
10195 emit_jump (if_true_label);
10196 if (drop_through_label)
10197 emit_label (drop_through_label);
10198 }
10199
10200 /* Given a comparison expression in rtl form, output conditional branches to
10201 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10202
10203 static void
10204 do_jump_for_compare (comparison, if_false_label, if_true_label)
10205 rtx comparison, if_false_label, if_true_label;
10206 {
10207 if (if_true_label)
10208 {
10209 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10210 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10211 else
10212 abort ();
10213
10214 if (if_false_label)
10215 emit_jump (if_false_label);
10216 }
10217 else if (if_false_label)
10218 {
10219 rtx insn;
10220 rtx prev = get_last_insn ();
10221 rtx branch = 0;
10222
10223 /* Output the branch with the opposite condition. Then try to invert
10224 what is generated. If more than one insn is a branch, or if the
10225 branch is not the last insn written, abort. If we can't invert
10226 the branch, make a true label, redirect this jump to it,
10227 emit a jump to the false label, and define the true label. */
10228
10229 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10230 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10231 else
10232 abort ();
10233
10234 /* Here we get the first insn that was just emitted. It used to be the
10235 case that, on some machines, emitting the branch would discard
10236 the previous compare insn and emit a replacement. This isn't
10237 done anymore, but abort if we see that PREV is deleted. */
10238
10239 if (prev == 0)
10240 insn = get_insns ();
10241 else if (INSN_DELETED_P (prev))
10242 abort ();
10243 else
10244 insn = NEXT_INSN (prev);
10245
10246 for (; insn; insn = NEXT_INSN (insn))
10247 if (GET_CODE (insn) == JUMP_INSN)
10248 {
10249 if (branch)
10250 abort ();
10251 branch = insn;
10252 }
10253
10254 if (branch != get_last_insn ())
10255 abort ();
10256
10257 JUMP_LABEL (branch) = if_false_label;
10258 if (! invert_jump (branch, if_false_label))
10259 {
10260 if_true_label = gen_label_rtx ();
10261 redirect_jump (branch, if_true_label);
10262 emit_jump (if_false_label);
10263 emit_label (if_true_label);
10264 }
10265 }
10266 }
10267 \f
10268 /* Generate code for a comparison expression EXP
10269 (including code to compute the values to be compared)
10270 and set (CC0) according to the result.
10271 SIGNED_CODE should be the rtx operation for this comparison for
10272 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10273
10274 We force a stack adjustment unless there are currently
10275 things pushed on the stack that aren't yet used. */
10276
10277 static rtx
10278 compare (exp, signed_code, unsigned_code)
10279 register tree exp;
10280 enum rtx_code signed_code, unsigned_code;
10281 {
10282 register rtx op0
10283 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10284 register rtx op1
10285 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10286 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10287 register enum machine_mode mode = TYPE_MODE (type);
10288 int unsignedp = TREE_UNSIGNED (type);
10289 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10290
10291 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10292 ((mode == BLKmode)
10293 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10294 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10295 }
10296
10297 /* Like compare but expects the values to compare as two rtx's.
10298 The decision as to signed or unsigned comparison must be made by the caller.
10299
10300 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10301 compared.
10302
10303 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10304 size of MODE should be used. */
10305
10306 rtx
10307 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10308 register rtx op0, op1;
10309 enum rtx_code code;
10310 int unsignedp;
10311 enum machine_mode mode;
10312 rtx size;
10313 int align;
10314 {
10315 rtx tem;
10316
10317 /* If one operand is constant, make it the second one. Only do this
10318 if the other operand is not constant as well. */
10319
10320 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10321 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10322 {
10323 tem = op0;
10324 op0 = op1;
10325 op1 = tem;
10326 code = swap_condition (code);
10327 }
10328
10329 if (flag_force_mem)
10330 {
10331 op0 = force_not_mem (op0);
10332 op1 = force_not_mem (op1);
10333 }
10334
10335 do_pending_stack_adjust ();
10336
10337 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10338 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10339 return tem;
10340
10341 #if 0
10342 /* There's no need to do this now that combine.c can eliminate lots of
10343 sign extensions. This can be less efficient in certain cases on other
10344 machines. */
10345
10346 /* If this is a signed equality comparison, we can do it as an
10347 unsigned comparison since zero-extension is cheaper than sign
10348 extension and comparisons with zero are done as unsigned. This is
10349 the case even on machines that can do fast sign extension, since
10350 zero-extension is easier to combine with other operations than
10351 sign-extension is. If we are comparing against a constant, we must
10352 convert it to what it would look like unsigned. */
10353 if ((code == EQ || code == NE) && ! unsignedp
10354 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10355 {
10356 if (GET_CODE (op1) == CONST_INT
10357 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10358 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10359 unsignedp = 1;
10360 }
10361 #endif
10362
10363 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10364
10365 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10366 }
10367 \f
10368 /* Generate code to calculate EXP using a store-flag instruction
10369 and return an rtx for the result. EXP is either a comparison
10370 or a TRUTH_NOT_EXPR whose operand is a comparison.
10371
10372 If TARGET is nonzero, store the result there if convenient.
10373
10374 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10375 cheap.
10376
10377 Return zero if there is no suitable set-flag instruction
10378 available on this machine.
10379
10380 Once expand_expr has been called on the arguments of the comparison,
10381 we are committed to doing the store flag, since it is not safe to
10382 re-evaluate the expression. We emit the store-flag insn by calling
10383 emit_store_flag, but only expand the arguments if we have a reason
10384 to believe that emit_store_flag will be successful. If we think that
10385 it will, but it isn't, we have to simulate the store-flag with a
10386 set/jump/set sequence. */
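/* A sketch of that fallback, with a hypothetical label: for
   `r = (a < b);' when no usable scc insn exists, the tail of this
   function emits roughly

	r = 1;
	if (a < b) goto L_done;
	r = 0;
      L_done:

   a set, a conditional jump around, and a second set.  */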
10387
10388 static rtx
10389 do_store_flag (exp, target, mode, only_cheap)
10390 tree exp;
10391 rtx target;
10392 enum machine_mode mode;
10393 int only_cheap;
10394 {
10395 enum rtx_code code;
10396 tree arg0, arg1, type;
10397 tree tem;
10398 enum machine_mode operand_mode;
10399 int invert = 0;
10400 int unsignedp;
10401 rtx op0, op1;
10402 enum insn_code icode;
10403 rtx subtarget = target;
10404 rtx result, label, pattern, jump_pat;
10405
10406 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10407 result at the end. We can't simply invert the test since it would
10408 have already been inverted if it were valid. This case occurs for
10409 some floating-point comparisons. */
10410
10411 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10412 invert = 1, exp = TREE_OPERAND (exp, 0);
10413
10414 arg0 = TREE_OPERAND (exp, 0);
10415 arg1 = TREE_OPERAND (exp, 1);
10416 type = TREE_TYPE (arg0);
10417 operand_mode = TYPE_MODE (type);
10418 unsignedp = TREE_UNSIGNED (type);
10419
10420 /* We won't bother with BLKmode store-flag operations because it would mean
10421 passing a lot of information to emit_store_flag. */
10422 if (operand_mode == BLKmode)
10423 return 0;
10424
10425 STRIP_NOPS (arg0);
10426 STRIP_NOPS (arg1);
10427
10428 /* Get the rtx comparison code to use. We know that EXP is a comparison
10429 operation of some type. Some comparisons against 1 and -1 can be
10430 converted to comparisons with zero. Do so here so that the tests
10431 below will be aware that we have a comparison with zero. These
10432 tests will not catch constants in the first operand, but constants
10433 are rarely passed as the first operand. */
10434
10435 switch (TREE_CODE (exp))
10436 {
10437 case EQ_EXPR:
10438 code = EQ;
10439 break;
10440 case NE_EXPR:
10441 code = NE;
10442 break;
10443 case LT_EXPR:
10444 if (integer_onep (arg1))
10445 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10446 else
10447 code = unsignedp ? LTU : LT;
10448 break;
10449 case LE_EXPR:
10450 if (! unsignedp && integer_all_onesp (arg1))
10451 arg1 = integer_zero_node, code = LT;
10452 else
10453 code = unsignedp ? LEU : LE;
10454 break;
10455 case GT_EXPR:
10456 if (! unsignedp && integer_all_onesp (arg1))
10457 arg1 = integer_zero_node, code = GE;
10458 else
10459 code = unsignedp ? GTU : GT;
10460 break;
10461 case GE_EXPR:
10462 if (integer_onep (arg1))
10463 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10464 else
10465 code = unsignedp ? GEU : GE;
10466 break;
10467 default:
10468 abort ();
10469 }
10470
10471 /* Put a constant second. */
10472 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10473 {
10474 tem = arg0; arg0 = arg1; arg1 = tem;
10475 code = swap_condition (code);
10476 }
10477
10478 /* If this is an equality or inequality test of a single bit, we can
10479 do this by shifting the bit being tested to the low-order bit and
10480 masking the result with the constant 1. If the condition was EQ,
10481 we xor it with 1. This does not require an scc insn and is faster
10482 than an scc insn even if we have it. */
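  /* Worked example, as a sketch: `(x & 8) != 0' becomes `(x >> 3) & 1',
     and `(x & 8) == 0' additionally XORs the shifted bit with 1;
     neither form needs an scc insn.  */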
10483
10484 if ((code == NE || code == EQ)
10485 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10486 && integer_pow2p (TREE_OPERAND (arg0, 1))
10487 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10488 {
10489 tree inner = TREE_OPERAND (arg0, 0);
10490 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10491 NULL_RTX, VOIDmode, 0)));
10492 int ops_unsignedp;
10493
10494 /* If INNER is a right shift by a constant and the shift count plus
10495 BITNUM does not exceed the type's precision, adjust BITNUM and INNER. */
10496
10497 if (TREE_CODE (inner) == RSHIFT_EXPR
10498 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10499 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10500 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10501 < TYPE_PRECISION (type)))
10502 {
10503 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10504 inner = TREE_OPERAND (inner, 0);
10505 }
10506
10507 /* If we are going to be able to omit the AND below, we must do our
10508 operations as unsigned. If we must use the AND, we have a choice.
10509 Normally unsigned is faster, but for some machines signed is. */
10510 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10511 #ifdef LOAD_EXTEND_OP
10512 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10513 #else
10514 : 1
10515 #endif
10516 );
10517
10518 if (subtarget == 0 || GET_CODE (subtarget) != REG
10519 || GET_MODE (subtarget) != operand_mode
10520 || ! safe_from_p (subtarget, inner))
10521 subtarget = 0;
10522
10523 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10524
10525 if (bitnum != 0)
10526 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10527 size_int (bitnum), subtarget, ops_unsignedp);
10528
10529 if (GET_MODE (op0) != mode)
10530 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10531
10532 if ((code == EQ && ! invert) || (code == NE && invert))
10533 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10534 ops_unsignedp, OPTAB_LIB_WIDEN);
10535
10536 /* Put the AND last so it can combine with more things. */
10537 if (bitnum != TYPE_PRECISION (type) - 1)
10538 op0 = expand_and (op0, const1_rtx, subtarget);
10539
10540 return op0;
10541 }
10542
10543 /* Now see if we are likely to be able to do this. Return if not. */
10544 if (! can_compare_p (operand_mode))
10545 return 0;
10546 icode = setcc_gen_code[(int) code];
10547 if (icode == CODE_FOR_nothing
10548 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10549 {
10550 /* We can only do this if it is one of the special cases that
10551 can be handled without an scc insn. */
10552 if ((code == LT && integer_zerop (arg1))
10553 || (! only_cheap && code == GE && integer_zerop (arg1)))
10554 ;
10555 else if (BRANCH_COST >= 0
10556 && ! only_cheap && (code == NE || code == EQ)
10557 && TREE_CODE (type) != REAL_TYPE
10558 && ((abs_optab->handlers[(int) operand_mode].insn_code
10559 != CODE_FOR_nothing)
10560 || (ffs_optab->handlers[(int) operand_mode].insn_code
10561 != CODE_FOR_nothing)))
10562 ;
10563 else
10564 return 0;
10565 }
10566
10567 preexpand_calls (exp);
10568 if (subtarget == 0 || GET_CODE (subtarget) != REG
10569 || GET_MODE (subtarget) != operand_mode
10570 || ! safe_from_p (subtarget, arg1))
10571 subtarget = 0;
10572
10573 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10574 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10575
10576 if (target == 0)
10577 target = gen_reg_rtx (mode);
10578
10579 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10580 because, if emit_store_flag does anything at all, it will succeed, and
10581 OP0 and OP1 will not be used subsequently. */
10582
10583 result = emit_store_flag (target, code,
10584 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10585 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10586 operand_mode, unsignedp, 1);
10587
10588 if (result)
10589 {
10590 if (invert)
10591 result = expand_binop (mode, xor_optab, result, const1_rtx,
10592 result, 0, OPTAB_LIB_WIDEN);
10593 return result;
10594 }
10595
10596 /* If this failed, we have to do this with set/compare/jump/set code. */
10597 if (target == 0 || GET_CODE (target) != REG
10598 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10599 target = gen_reg_rtx (GET_MODE (target));
10600
10601 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10602 result = compare_from_rtx (op0, op1, code, unsignedp,
10603 operand_mode, NULL_RTX, 0);
10604 if (GET_CODE (result) == CONST_INT)
10605 return (((result == const0_rtx && ! invert)
10606 || (result != const0_rtx && invert))
10607 ? const0_rtx : const1_rtx);
10608
10609 label = gen_label_rtx ();
10610 if (bcc_gen_fctn[(int) code] == 0)
10611 abort ();
10612
10613 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10614 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10615 emit_label (label);
10616
10617 return target;
10618 }
10619 \f
10620 /* Generate a tablejump instruction (used for switch statements). */
10621
10622 #ifdef HAVE_tablejump
10623
10624 /* INDEX is the value being switched on, with the lowest value
10625 in the table already subtracted.
10626 MODE is its expected mode (needed if INDEX is constant).
10627 RANGE is the length of the jump table.
10628 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10629
10630 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10631 index value is out of range. */
10632
10633 void
10634 do_tablejump (index, mode, range, table_label, default_label)
10635 rtx index, range, table_label, default_label;
10636 enum machine_mode mode;
10637 {
10638 register rtx temp, vector;
10639
10640 /* Do an unsigned comparison (in the proper mode) between the index
10641 expression and the value which represents the length of the range.
10642 Since we just finished subtracting the lower bound of the range
10643 from the index expression, this comparison allows us to simultaneously
10644 check that the original index expression value is both greater than
10645 or equal to the minimum value of the range and less than or equal to
10646 the maximum value of the range. */
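/* That is, for a case range [LO, HI] the caller has computed
   INDEX = ORIG - LO, and the single unsigned test `INDEX > HI - LO'
   rejects both ORIG < LO (where the subtraction wrapped around) and
   ORIG > HI. */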
10647
10648 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10649 emit_jump_insn (gen_bgtu (default_label));
10650
10651 /* If index is in range, it must fit in Pmode.
10652 Convert to Pmode so we can index with it. */
10653 if (mode != Pmode)
10654 index = convert_to_mode (Pmode, index, 1);
10655
10656 /* Don't let a MEM slip through, because then INDEX that comes
10657 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10658 and break_out_memory_refs will go to work on it and mess it up. */
10659 #ifdef PIC_CASE_VECTOR_ADDRESS
10660 if (flag_pic && GET_CODE (index) != REG)
10661 index = copy_to_mode_reg (Pmode, index);
10662 #endif
10663
10664 /* If flag_force_addr were to affect this address
10665 it could interfere with the tricky assumptions made
10666 about addresses that contain label-refs,
10667 which may be valid only very near the tablejump itself. */
10668 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10669 GET_MODE_SIZE, because this indicates how large insns are. The other
10670 uses should all be Pmode, because they are addresses. This code
10671 could fail if addresses and insns are not the same size. */
10672 index = gen_rtx (PLUS, Pmode,
10673 gen_rtx (MULT, Pmode, index,
10674 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10675 gen_rtx (LABEL_REF, Pmode, table_label));
10676 #ifdef PIC_CASE_VECTOR_ADDRESS
10677 if (flag_pic)
10678 index = PIC_CASE_VECTOR_ADDRESS (index);
10679 else
10680 #endif
10681 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10682 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10683 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10684 RTX_UNCHANGING_P (vector) = 1;
10685 convert_move (temp, vector, 0);
10686
10687 emit_jump_insn (gen_tablejump (temp, table_label));
10688
10689 #ifndef CASE_VECTOR_PC_RELATIVE
10690 /* If we are generating PIC code or if the table is PC-relative, the
10691 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10692 if (! flag_pic)
10693 emit_barrier ();
10694 #endif
10695 }
10696
10697 #endif /* HAVE_tablejump */
10698
10699
10700 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
10701 to that value is on the top of the stack. The resulting type is TYPE, and
10702 the source declaration is DECL. */
10703
10704 void
10705 bc_load_memory (type, decl)
10706 tree type, decl;
10707 {
10708 enum bytecode_opcode opcode;
10709
10710
10711 /* Bit fields are special. We only know about signed and
10712 unsigned ints, and enums. The latter are treated as
10713 signed integers. */
10714
10715 if (DECL_BIT_FIELD (decl))
10716 if (TREE_CODE (type) == ENUMERAL_TYPE
10717 || TREE_CODE (type) == INTEGER_TYPE)
10718 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10719 else
10720 abort ();
10721 else
10722 /* See corresponding comment in bc_store_memory(). */
10723 if (TYPE_MODE (type) == BLKmode
10724 || TYPE_MODE (type) == VOIDmode)
10725 return;
10726 else
10727 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
10728
10729 if (opcode == neverneverland)
10730 abort ();
10731
10732 bc_emit_bytecode (opcode);
10733
10734 #ifdef DEBUG_PRINT_CODE
10735 fputc ('\n', stderr);
10736 #endif
10737 }
10738
10739
10740 /* Store the contents of the second stack slot to the address in the
10741 top stack slot. DECL is the declaration of the destination and is used
10742 to determine whether we're dealing with a bitfield. */
10743
10744 void
10745 bc_store_memory (type, decl)
10746 tree type, decl;
10747 {
10748 enum bytecode_opcode opcode;
10749
10750
10751 if (DECL_BIT_FIELD (decl))
10752 {
10753 if (TREE_CODE (type) == ENUMERAL_TYPE
10754 || TREE_CODE (type) == INTEGER_TYPE)
10755 opcode = sstoreBI;
10756 else
10757 abort ();
10758 }
10759 else
10760 if (TYPE_MODE (type) == BLKmode)
10761 {
10762 /* Copy structure. This expands to a block copy instruction, storeBLK.
10763 In addition to the arguments expected by the other store instructions,
10764 it also expects a type size (SImode) on top of the stack, which is the
10765 structure size in size units (usually bytes). The first two arguments
10766 are already on the stack, so we just put the size on level 1. In some
10767 other languages the size may be variable; this is why we don't encode
10768 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
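/* So at the storeBLK itself the stack holds, from the top down:
   the size, the destination address, and the value being stored. */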
10769
10770 bc_expand_expr (TYPE_SIZE (type));
10771 opcode = storeBLK;
10772 }
10773 else
10774 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10775
10776 if (opcode == neverneverland)
10777 abort ();
10778
10779 bc_emit_bytecode (opcode);
10780
10781 #ifdef DEBUG_PRINT_CODE
10782 fputc ('\n', stderr);
10783 #endif
10784 }
10785
10786
10787 /* Allocate local stack space sufficient to hold a value of the given
10788 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10789 integral power of 2. A special case is locals of type VOID, which
10790 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
10791 remapped into the corresponding attribute of SImode. */
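/* For example, if LOCAL_VARS_SIZE is 6 and ALIGNMENT is 32 bits, the
   offset is first rounded up to the next multiple of 4 bytes (8), and
   the local is allocated there. */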
10792
10793 rtx
10794 bc_allocate_local (size, alignment)
10795 int size, alignment;
10796 {
10797 rtx retval;
10798 int byte_alignment;
10799
10800 if (size < 0)
10801 abort ();
10802
10803 /* Normalize size and alignment */
10804 if (!size)
10805 size = UNITS_PER_WORD;
10806
10807 if (alignment < BITS_PER_UNIT)
10808 byte_alignment = 1 << (INT_ALIGN - 1);
10809 else
10810 /* Align */
10811 byte_alignment = alignment / BITS_PER_UNIT;
10812
10813 if (local_vars_size & (byte_alignment - 1))
10814 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10815
10816 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10817 local_vars_size += size;
10818
10819 return retval;
10820 }
10821
10822
10823 /* Allocate variable-sized local array. Variable-sized arrays are
10824 actually pointers to the address in memory where they are stored. */
10825
10826 rtx
10827 bc_allocate_variable_array (size)
10828 tree size;
10829 {
10830 rtx retval;
10831 const int ptralign = (1 << (PTR_ALIGN - 1));
10832
10833 /* Align pointer */
10834 if (local_vars_size & ptralign)
10835 local_vars_size += ptralign - (local_vars_size & ptralign);
10836
10837 /* Note down the local space needed: a pointer to the block; also
10838 return a dummy rtx. */
10839
10840 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10841 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10842 return retval;
10843 }
10844
10845
10846 /* Push the machine address for the given external variable offset. */
10847 void
10848 bc_load_externaddr (externaddr)
10849 rtx externaddr;
10850 {
10851 bc_emit_bytecode (constP);
10852 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10853 BYTECODE_BC_LABEL (externaddr)->offset);
10854
10855 #ifdef DEBUG_PRINT_CODE
10856 fputc ('\n', stderr);
10857 #endif
10858 }
10859
10860
10861 static char *
10862 bc_strdup (s)
10863 char *s;
10864 {
10865 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10866 strcpy (new, s);
10867 return new;
10868 }
10869
10870
10871 /* Like above, but expects an IDENTIFIER. */
10872 void
10873 bc_load_externaddr_id (id, offset)
10874 tree id;
10875 int offset;
10876 {
10877 if (!IDENTIFIER_POINTER (id))
10878 abort ();
10879
10880 bc_emit_bytecode (constP);
10881 bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);
10882
10883 #ifdef DEBUG_PRINT_CODE
10884 fputc ('\n', stderr);
10885 #endif
10886 }
10887
10888
10889 /* Push the machine address for the given local variable offset. */
10890 void
10891 bc_load_localaddr (localaddr)
10892 rtx localaddr;
10893 {
10894 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10895 }
10896
10897
10898 /* Push the machine address for the given parameter offset.
10899 NOTE: offset is in bits. */
10900 void
10901 bc_load_parmaddr (parmaddr)
10902 rtx parmaddr;
10903 {
10904 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10905 / BITS_PER_UNIT));
10906 }
10907
10908
10909 /* Convert a[i] into *(a + i). */
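/* For example, if A's elements are 4 bytes wide, `a[i]' becomes
   `*(&a + i * 4)', with the multiplication done in a pointer-sized
   type so it cannot overflow spuriously. */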
10910 tree
10911 bc_canonicalize_array_ref (exp)
10912 tree exp;
10913 {
10914 tree type = TREE_TYPE (exp);
10915 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10916 TREE_OPERAND (exp, 0));
10917 tree index = TREE_OPERAND (exp, 1);
10918
10919
10920 /* Convert the integer argument to a type the same size as a pointer
10921 so the multiply won't overflow spuriously. */
10922
10923 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10924 index = convert (type_for_size (POINTER_SIZE, 0), index);
10925
10926 /* The array address isn't volatile even if the array is.
10927 (Of course this isn't terribly relevant since the bytecode
10928 translator treats nearly everything as volatile anyway.) */
10929 TREE_THIS_VOLATILE (array_adr) = 0;
10930
10931 return build1 (INDIRECT_REF, type,
10932 fold (build (PLUS_EXPR,
10933 TYPE_POINTER_TO (type),
10934 array_adr,
10935 fold (build (MULT_EXPR,
10936 TYPE_POINTER_TO (type),
10937 index,
10938 size_in_bytes (type))))));
10939 }
10940
10941
10942 /* Load the address of the component referenced by the given
10943 COMPONENT_REF expression.
10944
10945 Returns innermost lvalue. */
10946
10947 tree
10948 bc_expand_component_address (exp)
10949 tree exp;
10950 {
10951 tree tem, chain;
10952 enum machine_mode mode;
10953 int bitpos = 0;
10954 HOST_WIDE_INT SIval;
10955
10956
10957 tem = TREE_OPERAND (exp, 1);
10958 mode = DECL_MODE (tem);
10959
10960
10961 /* Compute cumulative bit offset for nested component refs
10962 and array refs, and find the ultimate containing object. */
10963
10964 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10965 {
10966 if (TREE_CODE (tem) == COMPONENT_REF)
10967 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10968 else
10969 if (TREE_CODE (tem) == ARRAY_REF
10970 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10971 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10972
10973 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10974 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10975 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10976 else
10977 break;
10978 }
10979
10980 bc_expand_expr (tem);
10981
10982
10983 /* For bit fields, also push their offset and size. */
10984 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10985 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
10986 else
10987 if ((SIval = bitpos / BITS_PER_UNIT))
10988 bc_emit_instruction (addconstPSI, SIval);
10989
10990 return (TREE_OPERAND (exp, 1));
10991 }
10992
10993
10994 /* Emit code to push two SI constants */
10995 void
10996 bc_push_offset_and_size (offset, size)
10997 HOST_WIDE_INT offset, size;
10998 {
10999 bc_emit_instruction (constSI, offset);
11000 bc_emit_instruction (constSI, size);
11001 }
11002
11003
11004 /* Emit byte code to push the address of the given lvalue expression to
11005 the stack. If it's a bit field, we also push offset and size info.
11006
11007 Returns innermost component, which allows us to determine not only
11008 its type, but also whether it's a bitfield. */
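/* For example, for `s.f' where F is a bit field, this pushes the
   address of S followed by F's bit offset and size, and returns the
   FIELD_DECL for F. */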
11009
11010 tree
11011 bc_expand_address (exp)
11012 tree exp;
11013 {
11014 /* Safeguard */
11015 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11016 return (exp);
11017
11018
11019 switch (TREE_CODE (exp))
11020 {
11021 case ARRAY_REF:
11022
11023 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11024
11025 case COMPONENT_REF:
11026
11027 return (bc_expand_component_address (exp));
11028
11029 case INDIRECT_REF:
11030
11031 bc_expand_expr (TREE_OPERAND (exp, 0));
11032
11033 /* For variable-sized types: retrieve pointer. Sometimes the
11034 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11035 also make sure we have an operand, just in case... */
11036
11037 if (TREE_OPERAND (exp, 0)
11038 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11039 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11040 bc_emit_instruction (loadP);
11041
11042 /* If packed, also return offset and size */
11043 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11044
11045 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11046 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11047
11048 return (TREE_OPERAND (exp, 0));
11049
11050 case FUNCTION_DECL:
11051
11052 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11053 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11054 break;
11055
11056 case PARM_DECL:
11057
11058 bc_load_parmaddr (DECL_RTL (exp));
11059
11060 /* For variable-sized types: retrieve pointer */
11061 if (TYPE_SIZE (TREE_TYPE (exp))
11062 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11063 bc_emit_instruction (loadP);
11064
11065 /* If packed, also return offset and size */
11066 if (DECL_BIT_FIELD (exp))
11067 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11068 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11069
11070 break;
11071
11072 case RESULT_DECL:
11073
11074 bc_emit_instruction (returnP);
11075 break;
11076
11077 case VAR_DECL:
11078
11079 #if 0
11080 if (BYTECODE_LABEL (DECL_RTL (exp)))
11081 bc_load_externaddr (DECL_RTL (exp));
11082 #endif
11083
11084 if (DECL_EXTERNAL (exp))
11085 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11086 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11087 else
11088 bc_load_localaddr (DECL_RTL (exp));
11089
11090 /* For variable-sized types: retrieve pointer */
11091 if (TYPE_SIZE (TREE_TYPE (exp))
11092 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11093 bc_emit_instruction (loadP);
11094
11095 /* If packed, also return offset and size */
11096 if (DECL_BIT_FIELD (exp))
11097 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11098 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11099
11100 break;
11101
11102 case STRING_CST:
11103 {
11104 rtx r;
11105
11106 bc_emit_bytecode (constP);
11107 r = output_constant_def (exp);
11108 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11109
11110 #ifdef DEBUG_PRINT_CODE
11111 fputc ('\n', stderr);
11112 #endif
11113 }
11114 break;
11115
11116 default:
11117
11118 abort ();
11119 break;
11120 }
11121
11122 /* Most lvalues don't have components. */
11123 return (exp);
11124 }
11125
11126
11127 /* Emit a type code to be used by the runtime support in handling
11128 parameter passing. The type code consists of the machine mode
11129 plus the minimal alignment shifted left 8 bits. */
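/* For example, on a target where int is a 32-bit SImode quantity
   aligned on 32 bits, the code is (int) SImode | (32 << 8). */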
11130
11131 tree
11132 bc_runtime_type_code (type)
11133 tree type;
11134 {
11135 int val;
11136
11137 switch (TREE_CODE (type))
11138 {
11139 case VOID_TYPE:
11140 case INTEGER_TYPE:
11141 case REAL_TYPE:
11142 case COMPLEX_TYPE:
11143 case ENUMERAL_TYPE:
11144 case POINTER_TYPE:
11145 case RECORD_TYPE:
11146
11147 val = (int) TYPE_MODE (type) | (TYPE_ALIGN (type) << 8);
11148 break;
11149
11150 case ERROR_MARK:
11151
11152 val = 0;
11153 break;
11154
11155 default:
11156
11157 abort ();
11158 }
11159 return build_int_2 (val, 0);
11160 }
11161
11162
11163 /* Generate constructor label */
11164 char *
11165 bc_gen_constr_label ()
11166 {
11167 static int label_counter;
11168 static char label[20];
11169
11170 sprintf (label, "*LR%d", label_counter++);
11171
11172 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11173 }
11174
11175
11176 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11177 expand the constructor data as static data, and push a pointer to it.
11178 The pointer is put in the pointer table and is retrieved by a constP
11179 bytecode instruction. We then loop and store each constructor member in
11180 the corresponding component. Finally, we return the original pointer on
11181 the stack. */
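/* For example, a fully constant constructor is emitted once in the
   text section and used as is, while one with computed elements is
   laid out in the data section and then filled in field by field. */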
11182
11183 void
11184 bc_expand_constructor (constr)
11185 tree constr;
11186 {
11187 char *l;
11188 HOST_WIDE_INT ptroffs;
11189 rtx constr_rtx;
11190
11191
11192 /* Literal constructors are handled as constants, whereas
11193 non-literals are evaluated and stored element by element
11194 into the data segment. */
11195
11196 /* Allocate space in the proper segment and push a pointer to that
11197 space on the stack. */
11198
11199 l = bc_gen_constr_label ();
11200
11201 if (TREE_CONSTANT (constr))
11202 {
11203 text_section ();
11204
11205 bc_emit_const_labeldef (l);
11206 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11207 }
11208 else
11209 {
11210 data_section ();
11211
11212 bc_emit_data_labeldef (l);
11213 bc_output_data_constructor (constr);
11214 }
11215
11216
11217 /* Add reference to pointer table and recall pointer to stack;
11218 this code is common for both types of constructors: literals
11219 and non-literals. */
11220
11221 ptroffs = bc_define_pointer (l);
11222 bc_emit_instruction (constP, ptroffs);
11223
11224 /* This is all that has to be done if it's a literal. */
11225 if (TREE_CONSTANT (constr))
11226 return;
11227
11228
11229 /* At this point, we have the pointer to the structure on top of the stack.
11230 Generate sequences of store_memory calls for the constructor. */
11231
11232 /* Constructor type is a structure. */
11233 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11234 {
11235 register tree elt;
11236
11237 /* If the constructor has fewer fields than the structure,
11238 clear the whole structure first. */
11239
11240 if (list_length (CONSTRUCTOR_ELTS (constr))
11241 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11242 {
11243 bc_emit_instruction (duplicate);
11244 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11245 bc_emit_instruction (clearBLK);
11246 }
11247
11248 /* Store each element of the constructor into the corresponding
11249 field of TARGET. */
11250
11251 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11252 {
11253 register tree field = TREE_PURPOSE (elt);
11254 register enum machine_mode mode;
11255 int bitsize;
11256 int bitpos;
11257 int unsignedp;
11258
11259 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11260 mode = DECL_MODE (field);
11261 unsignedp = TREE_UNSIGNED (field);
11262
11263 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11264
11265 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11266 /* The alignment of TARGET is
11267 at least what its type requires. */
11268 VOIDmode, 0,
11269 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11270 int_size_in_bytes (TREE_TYPE (constr)));
11271 }
11272 }
11273 else
11274
11275 /* Constructor type is an array. */
11276 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11277 {
11278 register tree elt;
11279 register int i;
11280 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11281 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11282 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11283 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11284
11285 /* If the constructor has fewer elements than the array,
11286 clear the whole array first. */
11287
11288 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11289 {
11290 bc_emit_instruction (duplicate);
11291 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11292 bc_emit_instruction (clearBLK);
11293 }
11294
11295
11296 /* Store each element of the constructor into the corresponding
11297 element of TARGET, determined by counting the elements. */
11298
11299 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11300 elt;
11301 elt = TREE_CHAIN (elt), i++)
11302 {
11303 register enum machine_mode mode;
11304 int bitsize;
11305 int bitpos;
11306 int unsignedp;
11307
11308 mode = TYPE_MODE (elttype);
11309 bitsize = GET_MODE_BITSIZE (mode);
11310 unsignedp = TREE_UNSIGNED (elttype);
11311
11312 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11313 /* * TYPE_SIZE_UNIT (elttype) */ );
11314
11315 bc_store_field (elt, bitsize, bitpos, mode,
11316 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11317 /* The alignment of TARGET is
11318 at least what its type requires. */
11319 VOIDmode, 0,
11320 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11321 int_size_in_bytes (TREE_TYPE (constr)));
11322 }
11323
11324 }
11325 }
11326
11327
11328 /* Store the value of EXP (an expression tree) into member FIELD of
11329 structure at address on stack, which has type TYPE, mode MODE and
11330 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11331 structure.
11332
11333 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11334 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11335
11336 void
11337 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11338 value_mode, unsignedp, align, total_size)
11339 int bitsize, bitpos;
11340 enum machine_mode mode;
11341 tree field, exp, type;
11342 enum machine_mode value_mode;
11343 int unsignedp;
11344 int align;
11345 int total_size;
11346 {
11347
11348 /* Expand expression and copy pointer */
11349 bc_expand_expr (exp);
11350 bc_emit_instruction (over);
11351
11352
11353 /* If the component is a bit field, we cannot use addressing to access
11354 it. Use bit-field techniques to store in it. */
11355
11356 if (DECL_BIT_FIELD (field))
11357 {
11358 bc_store_bit_field (bitpos, bitsize, unsignedp);
11359 return;
11360 }
11361 else
11362 /* Not bit field */
11363 {
11364 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11365
11366 /* Advance pointer to the desired member */
11367 if (offset)
11368 bc_emit_instruction (addconstPSI, offset);
11369
11370 /* Store */
11371 bc_store_memory (type, field);
11372 }
11373 }
11374
11375
11376 /* Store SI/SU in bitfield */
11377 void
11378 bc_store_bit_field (offset, size, unsignedp)
11379 int offset, size, unsignedp;
11380 {
11381 /* Push bitfield offset and size */
11382 bc_push_offset_and_size (offset, size);
11383
11384 /* Store */
11385 bc_emit_instruction (sstoreBI);
11386 }
11387
11388
11389 /* Load SI/SU from bitfield */
11390 void
11391 bc_load_bit_field (offset, size, unsignedp)
11392 int offset, size, unsignedp;
11393 {
11394 /* Push bitfield offset and size */
11395 bc_push_offset_and_size (offset, size);
11396
11397 /* Load: sign-extend if signed, else zero-extend */
11398 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11399 }
11400
11401
11402 /* Adjust the interpreter stack by NLEVELS. Positive means drop NLEVELS
11403 (adjust the stack pointer upwards); negative means add that number of
11404 levels (adjust the stack pointer downwards). Only positive values
11405 normally make sense. */
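/* For example, an NLEVELS of 1 or 2 is emitted as individual drop
   instructions, while other values use a single adjstackSI. */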
11406
11407 void
11408 bc_adjust_stack (nlevels)
11409 int nlevels;
11410 {
11411 switch (nlevels)
11412 {
11413 case 0:
11414 break;
11415
11416 case 2:
11417 bc_emit_instruction (drop);
11418
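/* Fall through to drop a second level. */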
11419 case 1:
11420 bc_emit_instruction (drop);
11421 break;
11422
11423 default:
11424
11425 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11426 stack_depth -= nlevels;
11427 }
11428
11429 #if defined (VALIDATE_STACK_FOR_BC)
11430 VALIDATE_STACK_FOR_BC ();
11431 #endif
11432 }