1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "machmode.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "obstack.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "function.h"
31 #include "insn-flags.h"
32 #include "insn-codes.h"
33 #include "expr.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "typeclass.h"
38
39 #include "bytecode.h"
40 #include "bc-opcode.h"
41 #include "bc-typecd.h"
42 #include "bc-optab.h"
43 #include "bc-emit.h"
44
45
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
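/* Illustration (editor's note): CEIL (10, 4) evaluates to 3, the number
   of 4-byte units needed to cover 10 bytes.  It is used below for word
   counts such as CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD).  */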
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
71 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
72
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
79 int cse_not_expected;
80
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
85
86 /* Number of units that we should eventually pop off the stack.
87 These are the arguments to function calls that have already returned. */
88 int pending_stack_adjust;
89
90 /* Nonzero means stack pops must not be deferred, and deferred stack
91 pops must not be output. It is nonzero inside a function call,
92 inside a conditional expression, inside a statement expression,
93 and in other cases as well. */
94 int inhibit_defer_pop;
95
96 /* A list of all cleanups which belong to the arguments of
97 function calls being expanded by expand_call. */
98 tree cleanups_this_call;
99
100 /* When temporaries are created by TARGET_EXPRs, they are created at
101 this level of temp_slot_level, so that they can remain allocated
102 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
103 of TARGET_EXPRs. */
104 int target_temp_slot_level;
105
106 /* Nonzero means __builtin_saveregs has already been done in this function.
107 The value is the pseudoreg containing the value __builtin_saveregs
108 returned. */
109 static rtx saveregs_value;
110
111 /* Similarly for __builtin_apply_args. */
112 static rtx apply_args_value;
113
114 /* This structure is used by move_by_pieces to describe the move to
115 be performed. */
116
117 struct move_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 int to_struct;
124 rtx from;
125 rtx from_addr;
126 int autinc_from;
127 int explicit_inc_from;
128 int from_struct;
129 int len;
130 int offset;
131 int reverse;
132 };
133
134 /* This structure is used by clear_by_pieces to describe the clear to
135 be performed. */
136
137 struct clear_by_pieces
138 {
139 rtx to;
140 rtx to_addr;
141 int autinc_to;
142 int explicit_inc_to;
143 int to_struct;
144 int len;
145 int offset;
146 int reverse;
147 };
148
149 /* Used to generate bytecodes: keep track of size of local variables,
150 as well as depth of arithmetic stack. (Notice that variables are
151 stored on the machine's stack, not the arithmetic stack.) */
152
153 extern int local_vars_size;
154 extern int stack_depth;
155 extern int max_stack_depth;
156 extern struct obstack permanent_obstack;
157 extern rtx arg_pointer_save_area;
158
159 static rtx enqueue_insn PROTO((rtx, rtx));
160 static int queued_subexp_p PROTO((rtx));
161 static void init_queue PROTO((void));
162 static void move_by_pieces PROTO((rtx, rtx, int, int));
163 static int move_by_pieces_ninsns PROTO((unsigned int, int));
164 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
165 struct move_by_pieces *));
166 static void clear_by_pieces PROTO((rtx, int, int));
167 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
168 struct clear_by_pieces *));
169 static int is_zeros_p PROTO((tree));
170 static int mostly_zeros_p PROTO((tree));
171 static void store_constructor PROTO((tree, rtx, int));
172 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
173 enum machine_mode, int, int, int));
174 static int get_inner_unaligned_p PROTO((tree));
175 static tree save_noncopied_parts PROTO((tree, tree));
176 static tree init_noncopied_parts PROTO((tree, tree));
177 static int safe_from_p PROTO((rtx, tree));
178 static int fixed_type_p PROTO((tree));
179 static int get_pointer_alignment PROTO((tree, unsigned));
180 static tree string_constant PROTO((tree, tree *));
181 static tree c_strlen PROTO((tree));
182 static rtx expand_builtin PROTO((tree, rtx, rtx,
183 enum machine_mode, int));
184 static int apply_args_size PROTO((void));
185 static int apply_result_size PROTO((void));
186 static rtx result_vector PROTO((int, rtx));
187 static rtx expand_builtin_apply_args PROTO((void));
188 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
189 static void expand_builtin_return PROTO((rtx));
190 static rtx expand_increment PROTO((tree, int));
191 rtx bc_expand_increment PROTO((struct increment_operator *, tree));
192 tree bc_runtime_type_code PROTO((tree));
193 rtx bc_allocate_local PROTO((int, int));
194 void bc_store_memory PROTO((tree, tree));
195 tree bc_expand_component_address PROTO((tree));
196 tree bc_expand_address PROTO((tree));
197 void bc_expand_constructor PROTO((tree));
198 void bc_adjust_stack PROTO((int));
199 tree bc_canonicalize_array_ref PROTO((tree));
200 void bc_load_memory PROTO((tree, tree));
201 void bc_load_externaddr PROTO((rtx));
202 void bc_load_externaddr_id PROTO((tree, int));
203 void bc_load_localaddr PROTO((rtx));
204 void bc_load_parmaddr PROTO((rtx));
205 static void preexpand_calls PROTO((tree));
206 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
207 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
208 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
209 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
210 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
211 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
212 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
213 static tree defer_cleanups_to PROTO((tree));
214 extern void (*interim_eh_hook) PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
216
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
220
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
223
224 /* MOVE_RATIO is the number of move instructions that is better than
225 a block move. */
226
227 #ifndef MOVE_RATIO
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
229 #define MOVE_RATIO 2
230 #else
231 /* A value of around 6 would minimize code size; infinity would minimize
232 execution time. */
233 #define MOVE_RATIO 15
234 #endif
235 #endif
236
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
247 #endif
248
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
252 #endif
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
255 #endif
256 \f
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261
262 /* Initialize maps used to convert modes to const, load, and store
263 bytecodes. */
264 void
265 bc_init_mode_to_opcode_maps ()
266 {
267 int mode;
268
269 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
270 mode_to_const_map[mode] =
271 mode_to_load_map[mode] =
272 mode_to_store_map[mode] = neverneverland;
273
274 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
275 mode_to_const_map[(int) SYM] = CONST; \
276 mode_to_load_map[(int) SYM] = LOAD; \
277 mode_to_store_map[(int) SYM] = STORE;
278
279 #include "modemap.def"
280 #undef DEF_MODEMAP
281 }
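/* Sketch of what a single modemap.def entry expands to; the opcode
   names below are hypothetical placeholders, not a quotation of the
   real table:

       DEF_MODEMAP (SImode, CODE, UCODE, constSI, loadSI, storeSI)

   becomes three assignments that fill mode_to_const_map[(int) SImode],
   mode_to_load_map[(int) SImode] and mode_to_store_map[(int) SImode].  */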
282 \f
283 /* This is run once per compilation to set up which modes can be used
284 directly in memory and to initialize the block move optab. */
285
286 void
287 init_expr_once ()
288 {
289 rtx insn, pat;
290 enum machine_mode mode;
291 /* Try indexing by frame ptr and try by stack ptr.
292 It is known that on the Convex the stack ptr isn't a valid index.
293 With luck, one or the other is valid on any machine. */
294 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
295 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
296
297 start_sequence ();
298 insn = emit_insn (gen_rtx (SET, 0, 0));
299 pat = PATTERN (insn);
300
301 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
302 mode = (enum machine_mode) ((int) mode + 1))
303 {
304 int regno;
305 rtx reg;
306 int num_clobbers;
307
308 direct_load[(int) mode] = direct_store[(int) mode] = 0;
309 PUT_MODE (mem, mode);
310 PUT_MODE (mem1, mode);
311
312 /* See if there is some register that can be used in this mode and
313 directly loaded or stored from memory. */
314
315 if (mode != VOIDmode && mode != BLKmode)
316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
317 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
318 regno++)
319 {
320 if (! HARD_REGNO_MODE_OK (regno, mode))
321 continue;
322
323 reg = gen_rtx (REG, mode, regno);
324
325 SET_SRC (pat) = mem;
326 SET_DEST (pat) = reg;
327 if (recog (pat, insn, &num_clobbers) >= 0)
328 direct_load[(int) mode] = 1;
329
330 SET_SRC (pat) = mem1;
331 SET_DEST (pat) = reg;
332 if (recog (pat, insn, &num_clobbers) >= 0)
333 direct_load[(int) mode] = 1;
334
335 SET_SRC (pat) = reg;
336 SET_DEST (pat) = mem;
337 if (recog (pat, insn, &num_clobbers) >= 0)
338 direct_store[(int) mode] = 1;
339
340 SET_SRC (pat) = reg;
341 SET_DEST (pat) = mem1;
342 if (recog (pat, insn, &num_clobbers) >= 0)
343 direct_store[(int) mode] = 1;
344 }
345 }
346
347 end_sequence ();
348 }
349
350 /* This is run at the start of compiling a function. */
351
352 void
353 init_expr ()
354 {
355 init_queue ();
356
357 pending_stack_adjust = 0;
358 inhibit_defer_pop = 0;
359 cleanups_this_call = 0;
360 saveregs_value = 0;
361 apply_args_value = 0;
362 forced_labels = 0;
363 }
364
365 /* Save all variables describing the current status into the structure *P.
366 This is used before starting a nested function. */
367
368 void
369 save_expr_status (p)
370 struct function *p;
371 {
372 /* Instead of saving the postincrement queue, empty it. */
373 emit_queue ();
374
375 p->pending_stack_adjust = pending_stack_adjust;
376 p->inhibit_defer_pop = inhibit_defer_pop;
377 p->cleanups_this_call = cleanups_this_call;
378 p->saveregs_value = saveregs_value;
379 p->apply_args_value = apply_args_value;
380 p->forced_labels = forced_labels;
381
382 pending_stack_adjust = 0;
383 inhibit_defer_pop = 0;
384 cleanups_this_call = 0;
385 saveregs_value = 0;
386 apply_args_value = 0;
387 forced_labels = 0;
388 }
389
390 /* Restore all variables describing the current status from the structure *P.
391 This is used after a nested function. */
392
393 void
394 restore_expr_status (p)
395 struct function *p;
396 {
397 pending_stack_adjust = p->pending_stack_adjust;
398 inhibit_defer_pop = p->inhibit_defer_pop;
399 cleanups_this_call = p->cleanups_this_call;
400 saveregs_value = p->saveregs_value;
401 apply_args_value = p->apply_args_value;
402 forced_labels = p->forced_labels;
403 }
404 \f
405 /* Manage the queue of increment instructions to be output
406 for POSTINCREMENT_EXPR expressions, etc. */
407
408 static rtx pending_chain;
409
410 /* Queue up to increment (or change) VAR later. BODY says how:
411 BODY should be the same thing you would pass to emit_insn
412 to increment right away. It will go to emit_insn later on.
413
414 The value is a QUEUED expression to be used in place of VAR
415 where you want to guarantee the pre-incrementation value of VAR. */
416
417 static rtx
418 enqueue_insn (var, body)
419 rtx var, body;
420 {
421 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
422 var, NULL_RTX, NULL_RTX, body, pending_chain);
423 return pending_chain;
424 }
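/* Example (editor's sketch): when expanding a C expression such as
   `a[i++]', the increment of `i' is queued with enqueue_insn and the
   returned QUEUED rtx stands in for `i' meanwhile; emit_queue later
   emits the increment itself.  */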
425
426 /* Use protect_from_queue to convert a QUEUED expression
427 into something that you can put immediately into an instruction.
428 If the queued incrementation has not happened yet,
429 protect_from_queue returns the variable itself.
430 If the incrementation has happened, protect_from_queue returns a temp
431 that contains a copy of the old value of the variable.
432
433 Any time an rtx which might possibly be a QUEUED is to be put
434 into an instruction, it must be passed through protect_from_queue first.
435 QUEUED expressions are not meaningful in instructions.
436
437 Do not pass a value through protect_from_queue and then hold
438 on to it for a while before putting it in an instruction!
439 If the queue is flushed in between, incorrect code will result. */
440
441 rtx
442 protect_from_queue (x, modify)
443 register rtx x;
444 int modify;
445 {
446 register RTX_CODE code = GET_CODE (x);
447
448 #if 0 /* A QUEUED can hang around after the queue is forced out. */
449 /* Shortcut for most common case. */
450 if (pending_chain == 0)
451 return x;
452 #endif
453
454 if (code != QUEUED)
455 {
456 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
457 use of autoincrement. Make a copy of the contents of the memory
458 location rather than a copy of the address, but not if the value is
459 of mode BLKmode. Don't modify X in place since it might be
460 shared. */
461 if (code == MEM && GET_MODE (x) != BLKmode
462 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
463 {
464 register rtx y = XEXP (x, 0);
465 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
466
467 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
468 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
469 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
470
471 if (QUEUED_INSN (y))
472 {
473 register rtx temp = gen_reg_rtx (GET_MODE (new));
474 emit_insn_before (gen_move_insn (temp, new),
475 QUEUED_INSN (y));
476 return temp;
477 }
478 return new;
479 }
480 /* Otherwise, recursively protect the subexpressions of all
481 the kinds of rtx's that can contain a QUEUED. */
482 if (code == MEM)
483 {
484 rtx tem = protect_from_queue (XEXP (x, 0), 0);
485 if (tem != XEXP (x, 0))
486 {
487 x = copy_rtx (x);
488 XEXP (x, 0) = tem;
489 }
490 }
491 else if (code == PLUS || code == MULT)
492 {
493 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
494 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
495 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
496 {
497 x = copy_rtx (x);
498 XEXP (x, 0) = new0;
499 XEXP (x, 1) = new1;
500 }
501 }
502 return x;
503 }
504 /* If the increment has not happened, use the variable itself. */
505 if (QUEUED_INSN (x) == 0)
506 return QUEUED_VAR (x);
507 /* If the increment has happened and a pre-increment copy exists,
508 use that copy. */
509 if (QUEUED_COPY (x) != 0)
510 return QUEUED_COPY (x);
511 /* The increment has happened but we haven't set up a pre-increment copy.
512 Set one up now, and use it. */
513 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
514 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
515 QUEUED_INSN (x));
516 return QUEUED_COPY (x);
517 }
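/* Usage sketch (an editor's illustration of a typical call site, not a
   quotation from this file); on a target providing an addsi3 pattern:

       op0 = protect_from_queue (op0, 0);
       op1 = protect_from_queue (op1, 0);
       emit_insn (gen_addsi3 (target, op0, op1));

   Each operand that might be QUEUED is filtered immediately before
   being placed in the insn, as the comment above requires.  */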
518
519 /* Return nonzero if X contains a QUEUED expression:
520 if it contains anything that will be altered by a queued increment.
521 We handle only combinations of MEM, PLUS, MINUS and MULT operators
522 since memory addresses generally contain only those. */
523
524 static int
525 queued_subexp_p (x)
526 rtx x;
527 {
528 register enum rtx_code code = GET_CODE (x);
529 switch (code)
530 {
531 case QUEUED:
532 return 1;
533 case MEM:
534 return queued_subexp_p (XEXP (x, 0));
535 case MULT:
536 case PLUS:
537 case MINUS:
538 return queued_subexp_p (XEXP (x, 0))
539 || queued_subexp_p (XEXP (x, 1));
540 }
541 return 0;
542 }
543
544 /* Perform all the pending incrementations. */
545
546 void
547 emit_queue ()
548 {
549 register rtx p;
550 while (p = pending_chain)
551 {
552 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
553 pending_chain = QUEUED_NEXT (p);
554 }
555 }
556
557 static void
558 init_queue ()
559 {
560 if (pending_chain)
561 abort ();
562 }
563 \f
564 /* Copy data from FROM to TO, where the machine modes are not the same.
565 Both modes may be integer, or both may be floating.
566 UNSIGNEDP should be nonzero if FROM is an unsigned type.
567 This causes zero-extension instead of sign-extension. */
568
569 void
570 convert_move (to, from, unsignedp)
571 register rtx to, from;
572 int unsignedp;
573 {
574 enum machine_mode to_mode = GET_MODE (to);
575 enum machine_mode from_mode = GET_MODE (from);
576 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
577 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
578 enum insn_code code;
579 rtx libcall;
580
581 /* rtx code for making an equivalent value. */
582 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
583
584 to = protect_from_queue (to, 1);
585 from = protect_from_queue (from, 0);
586
587 if (to_real != from_real)
588 abort ();
589
590 /* If FROM is a SUBREG that indicates that we have already done at least
591 the required extension, strip it. We don't handle such SUBREGs as
592 TO here. */
593
594 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
595 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
596 >= GET_MODE_SIZE (to_mode))
597 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
598 from = gen_lowpart (to_mode, from), from_mode = to_mode;
599
600 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
601 abort ();
602
603 if (to_mode == from_mode
604 || (from_mode == VOIDmode && CONSTANT_P (from)))
605 {
606 emit_move_insn (to, from);
607 return;
608 }
609
610 if (to_real)
611 {
612 rtx value;
613
614 #ifdef HAVE_extendqfhf2
615 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
616 {
617 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
618 return;
619 }
620 #endif
621 #ifdef HAVE_extendqfsf2
622 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
623 {
624 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
625 return;
626 }
627 #endif
628 #ifdef HAVE_extendqfdf2
629 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
630 {
631 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
632 return;
633 }
634 #endif
635 #ifdef HAVE_extendqfxf2
636 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
637 {
638 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
639 return;
640 }
641 #endif
642 #ifdef HAVE_extendqftf2
643 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
644 {
645 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649
650 #ifdef HAVE_extendhftqf2
651 if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
652 {
653 emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657
658 #ifdef HAVE_extendhfsf2
659 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
660 {
661 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_extendhfdf2
666 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
667 {
668 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672 #ifdef HAVE_extendhfxf2
673 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
674 {
675 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679 #ifdef HAVE_extendhftf2
680 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
681 {
682 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
683 return;
684 }
685 #endif
686
687 #ifdef HAVE_extendsfdf2
688 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
689 {
690 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_extendsfxf2
695 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
696 {
697 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_extendsftf2
702 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
703 {
704 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708 #ifdef HAVE_extenddfxf2
709 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
710 {
711 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
712 return;
713 }
714 #endif
715 #ifdef HAVE_extenddftf2
716 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
717 {
718 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
719 return;
720 }
721 #endif
722
723 #ifdef HAVE_trunchfqf2
724 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
725 {
726 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
727 return;
728 }
729 #endif
730 #ifdef HAVE_truncsfqf2
731 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
732 {
733 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
734 return;
735 }
736 #endif
737 #ifdef HAVE_truncdfqf2
738 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
739 {
740 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
741 return;
742 }
743 #endif
744 #ifdef HAVE_truncxfqf2
745 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
746 {
747 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
748 return;
749 }
750 #endif
751 #ifdef HAVE_trunctfqf2
752 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
753 {
754 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
755 return;
756 }
757 #endif
758
759 #ifdef HAVE_trunctqfhf2
760 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
761 {
762 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
763 return;
764 }
765 #endif
766 #ifdef HAVE_truncsfhf2
767 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
768 {
769 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
770 return;
771 }
772 #endif
773 #ifdef HAVE_truncdfhf2
774 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
775 {
776 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
777 return;
778 }
779 #endif
780 #ifdef HAVE_truncxfhf2
781 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
782 {
783 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
784 return;
785 }
786 #endif
787 #ifdef HAVE_trunctfhf2
788 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
789 {
790 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
791 return;
792 }
793 #endif
794 #ifdef HAVE_truncdfsf2
795 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
796 {
797 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
798 return;
799 }
800 #endif
801 #ifdef HAVE_truncxfsf2
802 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
803 {
804 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
805 return;
806 }
807 #endif
808 #ifdef HAVE_trunctfsf2
809 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
810 {
811 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
812 return;
813 }
814 #endif
815 #ifdef HAVE_truncxfdf2
816 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
817 {
818 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
819 return;
820 }
821 #endif
822 #ifdef HAVE_trunctfdf2
823 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
824 {
825 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
826 return;
827 }
828 #endif
829
830 libcall = (rtx) 0;
831 switch (from_mode)
832 {
833 case SFmode:
834 switch (to_mode)
835 {
836 case DFmode:
837 libcall = extendsfdf2_libfunc;
838 break;
839
840 case XFmode:
841 libcall = extendsfxf2_libfunc;
842 break;
843
844 case TFmode:
845 libcall = extendsftf2_libfunc;
846 break;
847 }
848 break;
849
850 case DFmode:
851 switch (to_mode)
852 {
853 case SFmode:
854 libcall = truncdfsf2_libfunc;
855 break;
856
857 case XFmode:
858 libcall = extenddfxf2_libfunc;
859 break;
860
861 case TFmode:
862 libcall = extenddftf2_libfunc;
863 break;
864 }
865 break;
866
867 case XFmode:
868 switch (to_mode)
869 {
870 case SFmode:
871 libcall = truncxfsf2_libfunc;
872 break;
873
874 case DFmode:
875 libcall = truncxfdf2_libfunc;
876 break;
877 }
878 break;
879
880 case TFmode:
881 switch (to_mode)
882 {
883 case SFmode:
884 libcall = trunctfsf2_libfunc;
885 break;
886
887 case DFmode:
888 libcall = trunctfdf2_libfunc;
889 break;
890 }
891 break;
892 }
893
894 if (libcall == (rtx) 0)
895 /* This conversion is not implemented yet. */
896 abort ();
897
898 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
899 1, from, from_mode);
900 emit_move_insn (to, value);
901 return;
902 }
903
904 /* Now both modes are integers. */
905
906 /* Handle expanding beyond a word. */
907 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
908 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
909 {
910 rtx insns;
911 rtx lowpart;
912 rtx fill_value;
913 rtx lowfrom;
914 int i;
915 enum machine_mode lowpart_mode;
916 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
917
918 /* Try converting directly if the insn is supported. */
919 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
920 != CODE_FOR_nothing)
921 {
922 /* If FROM is a SUBREG, put it into a register. Do this
923 so that we always generate the same set of insns for
924 better cse'ing; if an intermediate assignment occurred,
925 we won't be doing the operation directly on the SUBREG. */
926 if (optimize > 0 && GET_CODE (from) == SUBREG)
927 from = force_reg (from_mode, from);
928 emit_unop_insn (code, to, from, equiv_code);
929 return;
930 }
931 /* Next, try converting via full word. */
932 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
933 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
934 != CODE_FOR_nothing))
935 {
936 if (GET_CODE (to) == REG)
937 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
938 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
939 emit_unop_insn (code, to,
940 gen_lowpart (word_mode, to), equiv_code);
941 return;
942 }
943
944 /* No special multiword conversion insn; do it by hand. */
945 start_sequence ();
946
947 /* Since we will turn this into a no conflict block, we must ensure
948 that the source does not overlap the target. */
949
950 if (reg_overlap_mentioned_p (to, from))
951 from = force_reg (from_mode, from);
952
953 /* Get a copy of FROM widened to a word, if necessary. */
954 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
955 lowpart_mode = word_mode;
956 else
957 lowpart_mode = from_mode;
958
959 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
960
961 lowpart = gen_lowpart (lowpart_mode, to);
962 emit_move_insn (lowpart, lowfrom);
963
964 /* Compute the value to put in each remaining word. */
965 if (unsignedp)
966 fill_value = const0_rtx;
967 else
968 {
969 #ifdef HAVE_slt
970 if (HAVE_slt
971 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
972 && STORE_FLAG_VALUE == -1)
973 {
974 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
975 lowpart_mode, 0, 0);
976 fill_value = gen_reg_rtx (word_mode);
977 emit_insn (gen_slt (fill_value));
978 }
979 else
980 #endif
981 {
982 fill_value
983 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
984 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
985 NULL_RTX, 0);
986 fill_value = convert_to_mode (word_mode, fill_value, 1);
987 }
988 }
989
990 /* Fill the remaining words. */
991 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
992 {
993 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
994 rtx subword = operand_subword (to, index, 1, to_mode);
995
996 if (subword == 0)
997 abort ();
998
999 if (fill_value != subword)
1000 emit_move_insn (subword, fill_value);
1001 }
1002
1003 insns = get_insns ();
1004 end_sequence ();
1005
1006 emit_no_conflict_block (insns, to, from, NULL_RTX,
1007 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
1008 return;
1009 }
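/* Worked example of the multiword widening above (assuming a 32-bit
   target): extending a negative SImode value to DImode copies the low
   word, then fills the remaining word with the sign, obtained by an
   arithmetic right shift of GET_MODE_BITSIZE (SImode) - 1 = 31 bits,
   which yields all ones for a negative value.  */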
1010
1011 /* Truncating multi-word to a word or less. */
1012 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
1013 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
1014 {
1015 if (!((GET_CODE (from) == MEM
1016 && ! MEM_VOLATILE_P (from)
1017 && direct_load[(int) to_mode]
1018 && ! mode_dependent_address_p (XEXP (from, 0)))
1019 || GET_CODE (from) == REG
1020 || GET_CODE (from) == SUBREG))
1021 from = force_reg (from_mode, from);
1022 convert_move (to, gen_lowpart (word_mode, from), 0);
1023 return;
1024 }
1025
1026 /* Handle pointer conversion */ /* SPEE 900220 */
1027 if (to_mode == PSImode)
1028 {
1029 if (from_mode != SImode)
1030 from = convert_to_mode (SImode, from, unsignedp);
1031
1032 #ifdef HAVE_truncsipsi2
1033 if (HAVE_truncsipsi2)
1034 {
1035 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1036 return;
1037 }
1038 #endif /* HAVE_truncsipsi2 */
1039 abort ();
1040 }
1041
1042 if (from_mode == PSImode)
1043 {
1044 if (to_mode != SImode)
1045 {
1046 from = convert_to_mode (SImode, from, unsignedp);
1047 from_mode = SImode;
1048 }
1049 else
1050 {
1051 #ifdef HAVE_extendpsisi2
1052 if (HAVE_extendpsisi2)
1053 {
1054 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1055 return;
1056 }
1057 #endif /* HAVE_extendpsisi2 */
1058 abort ();
1059 }
1060 }
1061
1062 if (to_mode == PDImode)
1063 {
1064 if (from_mode != DImode)
1065 from = convert_to_mode (DImode, from, unsignedp);
1066
1067 #ifdef HAVE_truncdipdi2
1068 if (HAVE_truncdipdi2)
1069 {
1070 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1071 return;
1072 }
1073 #endif /* HAVE_truncdipdi2 */
1074 abort ();
1075 }
1076
1077 if (from_mode == PDImode)
1078 {
1079 if (to_mode != DImode)
1080 {
1081 from = convert_to_mode (DImode, from, unsignedp);
1082 from_mode = DImode;
1083 }
1084 else
1085 {
1086 #ifdef HAVE_extendpdidi2
1087 if (HAVE_extendpdidi2)
1088 {
1089 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1090 return;
1091 }
1092 #endif /* HAVE_extendpdidi2 */
1093 abort ();
1094 }
1095 }
1096
1097 /* Now follow all the conversions between integers
1098 no more than a word long. */
1099
1100 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1101 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1102 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1103 GET_MODE_BITSIZE (from_mode)))
1104 {
1105 if (!((GET_CODE (from) == MEM
1106 && ! MEM_VOLATILE_P (from)
1107 && direct_load[(int) to_mode]
1108 && ! mode_dependent_address_p (XEXP (from, 0)))
1109 || GET_CODE (from) == REG
1110 || GET_CODE (from) == SUBREG))
1111 from = force_reg (from_mode, from);
1112 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1113 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1114 from = copy_to_reg (from);
1115 emit_move_insn (to, gen_lowpart (to_mode, from));
1116 return;
1117 }
1118
1119 /* Handle extension. */
1120 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1121 {
1122 /* Convert directly if that works. */
1123 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1124 != CODE_FOR_nothing)
1125 {
1126 emit_unop_insn (code, to, from, equiv_code);
1127 return;
1128 }
1129 else
1130 {
1131 enum machine_mode intermediate;
1132
1133 /* Search for a mode to convert via. */
1134 for (intermediate = from_mode; intermediate != VOIDmode;
1135 intermediate = GET_MODE_WIDER_MODE (intermediate))
1136 if (((can_extend_p (to_mode, intermediate, unsignedp)
1137 != CODE_FOR_nothing)
1138 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1139 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1140 && (can_extend_p (intermediate, from_mode, unsignedp)
1141 != CODE_FOR_nothing))
1142 {
1143 convert_move (to, convert_to_mode (intermediate, from,
1144 unsignedp), unsignedp);
1145 return;
1146 }
1147
1148 /* No suitable intermediate mode. */
1149 abort ();
1150 }
1151 }
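/* Example of the intermediate-mode search above (hypothetical target):
   to extend QImode to DImode on a machine that has extendqisi2 and
   extendsidi2 but no extendqidi2, SImode is found as the intermediate,
   and the value is widened QImode -> SImode -> DImode.  */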
1152
1153 /* Support special truncate insns for certain modes. */
1154
1155 if (from_mode == DImode && to_mode == SImode)
1156 {
1157 #ifdef HAVE_truncdisi2
1158 if (HAVE_truncdisi2)
1159 {
1160 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1161 return;
1162 }
1163 #endif
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 return;
1166 }
1167
1168 if (from_mode == DImode && to_mode == HImode)
1169 {
1170 #ifdef HAVE_truncdihi2
1171 if (HAVE_truncdihi2)
1172 {
1173 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1174 return;
1175 }
1176 #endif
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 return;
1179 }
1180
1181 if (from_mode == DImode && to_mode == QImode)
1182 {
1183 #ifdef HAVE_truncdiqi2
1184 if (HAVE_truncdiqi2)
1185 {
1186 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1187 return;
1188 }
1189 #endif
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 return;
1192 }
1193
1194 if (from_mode == SImode && to_mode == HImode)
1195 {
1196 #ifdef HAVE_truncsihi2
1197 if (HAVE_truncsihi2)
1198 {
1199 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1200 return;
1201 }
1202 #endif
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 return;
1205 }
1206
1207 if (from_mode == SImode && to_mode == QImode)
1208 {
1209 #ifdef HAVE_truncsiqi2
1210 if (HAVE_truncsiqi2)
1211 {
1212 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1213 return;
1214 }
1215 #endif
1216 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 return;
1218 }
1219
1220 if (from_mode == HImode && to_mode == QImode)
1221 {
1222 #ifdef HAVE_trunchiqi2
1223 if (HAVE_trunchiqi2)
1224 {
1225 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1226 return;
1227 }
1228 #endif
1229 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 return;
1231 }
1232
1233 if (from_mode == TImode && to_mode == DImode)
1234 {
1235 #ifdef HAVE_trunctidi2
1236 if (HAVE_trunctidi2)
1237 {
1238 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1239 return;
1240 }
1241 #endif
1242 convert_move (to, force_reg (from_mode, from), unsignedp);
1243 return;
1244 }
1245
1246 if (from_mode == TImode && to_mode == SImode)
1247 {
1248 #ifdef HAVE_trunctisi2
1249 if (HAVE_trunctisi2)
1250 {
1251 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1252 return;
1253 }
1254 #endif
1255 convert_move (to, force_reg (from_mode, from), unsignedp);
1256 return;
1257 }
1258
1259 if (from_mode == TImode && to_mode == HImode)
1260 {
1261 #ifdef HAVE_trunctihi2
1262 if (HAVE_trunctihi2)
1263 {
1264 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1265 return;
1266 }
1267 #endif
1268 convert_move (to, force_reg (from_mode, from), unsignedp);
1269 return;
1270 }
1271
1272 if (from_mode == TImode && to_mode == QImode)
1273 {
1274 #ifdef HAVE_trunctiqi2
1275 if (HAVE_trunctiqi2)
1276 {
1277 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1278 return;
1279 }
1280 #endif
1281 convert_move (to, force_reg (from_mode, from), unsignedp);
1282 return;
1283 }
1284
1285 /* Handle truncation of volatile memrefs, and so on;
1286 the things that couldn't be truncated directly,
1287 and for which there was no special instruction. */
1288 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1289 {
1290 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1291 emit_move_insn (to, temp);
1292 return;
1293 }
1294
1295 /* Mode combination is not recognized. */
1296 abort ();
1297 }
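/* Example call (editor's sketch): sign-extending a QImode value held
   in `qireg' into a fresh SImode register reduces to

       convert_move (gen_reg_rtx (SImode), qireg, 0);

   passing 1 for UNSIGNEDP would request zero extension instead.  */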
1298
1299 /* Return an rtx for a value that would result
1300 from converting X to mode MODE.
1301 Both X and MODE may be floating, or both integer.
1302 UNSIGNEDP is nonzero if X is an unsigned value.
1303 This can be done by referring to a part of X in place
1304 or by copying to a new temporary with conversion.
1305
1306 This function *must not* call protect_from_queue
1307 except when putting X into an insn (in which case convert_move does it). */
1308
1309 rtx
1310 convert_to_mode (mode, x, unsignedp)
1311 enum machine_mode mode;
1312 rtx x;
1313 int unsignedp;
1314 {
1315 return convert_modes (mode, VOIDmode, x, unsignedp);
1316 }
1317
1318 /* Return an rtx for a value that would result
1319 from converting X from mode OLDMODE to mode MODE.
1320 Both modes may be floating, or both integer.
1321 UNSIGNEDP is nonzero if X is an unsigned value.
1322
1323 This can be done by referring to a part of X in place
1324 or by copying to a new temporary with conversion.
1325
1326 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1327
1328 This function *must not* call protect_from_queue
1329 except when putting X into an insn (in which case convert_move does it). */
1330
1331 rtx
1332 convert_modes (mode, oldmode, x, unsignedp)
1333 enum machine_mode mode, oldmode;
1334 rtx x;
1335 int unsignedp;
1336 {
1337 register rtx temp;
1338
1339 /* If FROM is a SUBREG that indicates that we have already done at least
1340 the required extension, strip it. */
1341
1342 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1343 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1344 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1345 x = gen_lowpart (mode, x);
1346
1347 if (GET_MODE (x) != VOIDmode)
1348 oldmode = GET_MODE (x);
1349
1350 if (mode == oldmode)
1351 return x;
1352
1353 /* There is one case that we must handle specially: If we are converting
1354 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1355 we are to interpret the constant as unsigned, gen_lowpart will do
1356 the wrong thing if the constant appears negative. What we want to do is
1357 make the high-order word of the constant zero, not all ones. */
1358
1359 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1360 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1361 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1362 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1363
1364 /* We can do this with a gen_lowpart if both desired and current modes
1365 are integer, and this is either a constant integer, a register, or a
1366 non-volatile MEM. Except for the constant case where MODE is no
1367 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1368
1369 if ((GET_CODE (x) == CONST_INT
1370 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1371 || (GET_MODE_CLASS (mode) == MODE_INT
1372 && GET_MODE_CLASS (oldmode) == MODE_INT
1373 && (GET_CODE (x) == CONST_DOUBLE
1374 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1375 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1376 && direct_load[(int) mode])
1377 || (GET_CODE (x) == REG
1378 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1379 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1380 {
1381 /* ?? If we don't know OLDMODE, we have to assume here that
1382 X does not need sign- or zero-extension. This may not be
1383 the case, but it's the best we can do. */
1384 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1385 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1386 {
1387 HOST_WIDE_INT val = INTVAL (x);
1388 int width = GET_MODE_BITSIZE (oldmode);
1389
1390 /* We must sign or zero-extend in this case. Start by
1391 zero-extending, then sign extend if we need to. */
1392 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1393 if (! unsignedp
1394 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1395 val |= (HOST_WIDE_INT) (-1) << width;
1396
1397 return GEN_INT (val);
1398 }
1399
1400 return gen_lowpart (mode, x);
1401 }
1402
1403 temp = gen_reg_rtx (mode);
1404 convert_move (temp, x, unsignedp);
1405 return temp;
1406 }
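/* Example (editor's sketch): convert_modes (QImode, SImode, GEN_INT (-1), 1)
   can return a QImode view of the constant directly via gen_lowpart,
   whereas widening a value held in a register requires a fresh pseudo
   and a real conversion through convert_move.  */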
1407 \f
1408 /* Generate several move instructions to copy LEN bytes
1409 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1410 The caller must pass FROM and TO
1411 through protect_from_queue before calling.
1412 ALIGN (in bytes) is maximum alignment we can assume. */
1413
1414 static void
1415 move_by_pieces (to, from, len, align)
1416 rtx to, from;
1417 int len, align;
1418 {
1419 struct move_by_pieces data;
1420 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1421 int max_size = MOVE_MAX + 1;
1422
1423 data.offset = 0;
1424 data.to_addr = to_addr;
1425 data.from_addr = from_addr;
1426 data.to = to;
1427 data.from = from;
1428 data.autinc_to
1429 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1430 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1431 data.autinc_from
1432 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1433 || GET_CODE (from_addr) == POST_INC
1434 || GET_CODE (from_addr) == POST_DEC);
1435
1436 data.explicit_inc_from = 0;
1437 data.explicit_inc_to = 0;
1438 data.reverse
1439 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1440 if (data.reverse) data.offset = len;
1441 data.len = len;
1442
1443 data.to_struct = MEM_IN_STRUCT_P (to);
1444 data.from_struct = MEM_IN_STRUCT_P (from);
1445
1446 /* If copying requires more than two move insns,
1447 copy addresses to registers (to make displacements shorter)
1448 and use post-increment if available. */
1449 if (!(data.autinc_from && data.autinc_to)
1450 && move_by_pieces_ninsns (len, align) > 2)
1451 {
1452 #ifdef HAVE_PRE_DECREMENT
1453 if (data.reverse && ! data.autinc_from)
1454 {
1455 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1456 data.autinc_from = 1;
1457 data.explicit_inc_from = -1;
1458 }
1459 #endif
1460 #ifdef HAVE_POST_INCREMENT
1461 if (! data.autinc_from)
1462 {
1463 data.from_addr = copy_addr_to_reg (from_addr);
1464 data.autinc_from = 1;
1465 data.explicit_inc_from = 1;
1466 }
1467 #endif
1468 if (!data.autinc_from && CONSTANT_P (from_addr))
1469 data.from_addr = copy_addr_to_reg (from_addr);
1470 #ifdef HAVE_PRE_DECREMENT
1471 if (data.reverse && ! data.autinc_to)
1472 {
1473 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1474 data.autinc_to = 1;
1475 data.explicit_inc_to = -1;
1476 }
1477 #endif
1478 #ifdef HAVE_POST_INCREMENT
1479 if (! data.reverse && ! data.autinc_to)
1480 {
1481 data.to_addr = copy_addr_to_reg (to_addr);
1482 data.autinc_to = 1;
1483 data.explicit_inc_to = 1;
1484 }
1485 #endif
1486 if (!data.autinc_to && CONSTANT_P (to_addr))
1487 data.to_addr = copy_addr_to_reg (to_addr);
1488 }
1489
1490 if (! SLOW_UNALIGNED_ACCESS
1491 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1492 align = MOVE_MAX;
1493
1494 /* First move what we can in the largest integer mode, then go to
1495 successively smaller modes. */
1496
1497 while (max_size > 1)
1498 {
1499 enum machine_mode mode = VOIDmode, tmode;
1500 enum insn_code icode;
1501
1502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1503 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1504 if (GET_MODE_SIZE (tmode) < max_size)
1505 mode = tmode;
1506
1507 if (mode == VOIDmode)
1508 break;
1509
1510 icode = mov_optab->handlers[(int) mode].insn_code;
1511 if (icode != CODE_FOR_nothing
1512 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1513 GET_MODE_SIZE (mode)))
1514 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1515
1516 max_size = GET_MODE_SIZE (mode);
1517 }
1518
1519 /* The code above should have handled everything. */
1520 if (data.len != 0)
1521 abort ();
1522 }
1523
1524 /* Return number of insns required to move L bytes by pieces.
1525 ALIGN (in bytes) is maximum alignment we can assume. */
1526
1527 static int
1528 move_by_pieces_ninsns (l, align)
1529 unsigned int l;
1530 int align;
1531 {
1532 register int n_insns = 0;
1533 int max_size = MOVE_MAX + 1;
1534
1535 if (! SLOW_UNALIGNED_ACCESS
1536 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1537 align = MOVE_MAX;
1538
1539 while (max_size > 1)
1540 {
1541 enum machine_mode mode = VOIDmode, tmode;
1542 enum insn_code icode;
1543
1544 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1545 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1546 if (GET_MODE_SIZE (tmode) < max_size)
1547 mode = tmode;
1548
1549 if (mode == VOIDmode)
1550 break;
1551
1552 icode = mov_optab->handlers[(int) mode].insn_code;
1553 if (icode != CODE_FOR_nothing
1554 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1555 GET_MODE_SIZE (mode)))
1556 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1557
1558 max_size = GET_MODE_SIZE (mode);
1559 }
1560
1561 return n_insns;
1562 }
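/* Worked example (assuming a 32-bit target where MOVE_MAX is 4 and
   SLOW_UNALIGNED_ACCESS holds): for L = 7 and ALIGN = 4 this counts
   one SImode move (4 bytes), one HImode move (2 bytes) and one QImode
   move (1 byte), so it returns 3.  */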
1563
1564 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1565 with move instructions for mode MODE. GENFUN is the gen_... function
1566 to make a move insn for that mode. DATA has all the other info. */
1567
1568 static void
1569 move_by_pieces_1 (genfun, mode, data)
1570 rtx (*genfun) ();
1571 enum machine_mode mode;
1572 struct move_by_pieces *data;
1573 {
1574 register int size = GET_MODE_SIZE (mode);
1575 register rtx to1, from1;
1576
1577 while (data->len >= size)
1578 {
1579 if (data->reverse) data->offset -= size;
1580
1581 to1 = (data->autinc_to
1582 ? gen_rtx (MEM, mode, data->to_addr)
1583 : change_address (data->to, mode,
1584 plus_constant (data->to_addr, data->offset)));
1585 MEM_IN_STRUCT_P (to1) = data->to_struct;
1586 from1 =
1587 (data->autinc_from
1588 ? gen_rtx (MEM, mode, data->from_addr)
1589 : change_address (data->from, mode,
1590 plus_constant (data->from_addr, data->offset)));
1591 MEM_IN_STRUCT_P (from1) = data->from_struct;
1592
1593 #ifdef HAVE_PRE_DECREMENT
1594 if (data->explicit_inc_to < 0)
1595 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1596 if (data->explicit_inc_from < 0)
1597 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1598 #endif
1599
1600 emit_insn ((*genfun) (to1, from1));
1601 #ifdef HAVE_POST_INCREMENT
1602 if (data->explicit_inc_to > 0)
1603 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1604 if (data->explicit_inc_from > 0)
1605 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1606 #endif
1607
1608 if (! data->reverse) data->offset += size;
1609
1610 data->len -= size;
1611 }
1612 }
1613 \f
1614 /* Emit code to move a block Y to a block X.
1615 This may be done with string-move instructions,
1616 with multiple scalar move instructions, or with a library call.
1617
1618 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1619 with mode BLKmode.
1620 SIZE is an rtx that says how long they are.
1621 ALIGN is the maximum alignment we can assume they have,
1622 measured in bytes. */
1623
1624 void
1625 emit_block_move (x, y, size, align)
1626 rtx x, y;
1627 rtx size;
1628 int align;
1629 {
1630 if (GET_MODE (x) != BLKmode)
1631 abort ();
1632
1633 if (GET_MODE (y) != BLKmode)
1634 abort ();
1635
1636 x = protect_from_queue (x, 1);
1637 y = protect_from_queue (y, 0);
1638 size = protect_from_queue (size, 0);
1639
1640 if (GET_CODE (x) != MEM)
1641 abort ();
1642 if (GET_CODE (y) != MEM)
1643 abort ();
1644 if (size == 0)
1645 abort ();
1646
1647 if (GET_CODE (size) == CONST_INT
1648 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1649 move_by_pieces (x, y, INTVAL (size), align);
1650 else
1651 {
1652 /* Try the most limited insn first, because there's no point
1653 including more than one in the machine description unless
1654 the more limited one has some advantage. */
1655
1656 rtx opalign = GEN_INT (align);
1657 enum machine_mode mode;
1658
1659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1660 mode = GET_MODE_WIDER_MODE (mode))
1661 {
1662 enum insn_code code = movstr_optab[(int) mode];
1663
1664 if (code != CODE_FOR_nothing
1665 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1666 here because if SIZE is less than the mode mask, as it is
1667 returned by the macro, it will definitely be less than the
1668 actual mode mask. */
1669 && ((GET_CODE (size) == CONST_INT
1670 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1671 <= GET_MODE_MASK (mode)))
1672 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1673 && (insn_operand_predicate[(int) code][0] == 0
1674 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1675 && (insn_operand_predicate[(int) code][1] == 0
1676 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1677 && (insn_operand_predicate[(int) code][3] == 0
1678 || (*insn_operand_predicate[(int) code][3]) (opalign,
1679 VOIDmode)))
1680 {
1681 rtx op2;
1682 rtx last = get_last_insn ();
1683 rtx pat;
1684
1685 op2 = convert_to_mode (mode, size, 1);
1686 if (insn_operand_predicate[(int) code][2] != 0
1687 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1688 op2 = copy_to_mode_reg (mode, op2);
1689
1690 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1691 if (pat)
1692 {
1693 emit_insn (pat);
1694 return;
1695 }
1696 else
1697 delete_insns_since (last);
1698 }
1699 }
1700
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 emit_library_call (memcpy_libfunc, 0,
1703 VOIDmode, 3, XEXP (x, 0), Pmode,
1704 XEXP (y, 0), Pmode,
1705 convert_to_mode (TYPE_MODE (sizetype), size,
1706 TREE_UNSIGNED (sizetype)),
1707 TYPE_MODE (sizetype));
1708 #else
1709 emit_library_call (bcopy_libfunc, 0,
1710 VOIDmode, 3, XEXP (y, 0), Pmode,
1711 XEXP (x, 0), Pmode,
1712 convert_to_mode (TYPE_MODE (integer_type_node), size,
1713 TREE_UNSIGNED (integer_type_node)),
1714 TYPE_MODE (integer_type_node));
1715 #endif
1716 }
1717 }
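/* Example call (editor's sketch): copying a 16-byte, word-aligned
   BLKmode object might look like

       emit_block_move (dest, src, GEN_INT (16), 4);

   where `dest' and `src' are BLKmode MEM rtx's.  If move_by_pieces_ninsns
   yields fewer than MOVE_RATIO insns the copy is done by move_by_pieces;
   otherwise a movstr pattern or a library call is used.  */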
1718 \f
1719 /* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1721
1722 void
1723 move_block_to_reg (regno, x, nregs, mode)
1724 int regno;
1725 rtx x;
1726 int nregs;
1727 enum machine_mode mode;
1728 {
1729 int i;
1730 rtx pat, last;
1731
1732 if (nregs == 0)
1733 return;
1734
1735 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1736 x = validize_mem (force_const_mem (mode, x));
1737
1738 /* See if the machine can do this with a load multiple insn. */
1739 #ifdef HAVE_load_multiple
1740 if (HAVE_load_multiple)
1741 {
1742 last = get_last_insn ();
1743 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1744 GEN_INT (nregs));
1745 if (pat)
1746 {
1747 emit_insn (pat);
1748 return;
1749 }
1750 else
1751 delete_insns_since (last);
1752 }
1753 #endif
1754
1755 for (i = 0; i < nregs; i++)
1756 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1757 operand_subword_force (x, i, mode));
1758 }
1759
1760 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1761 The number of registers to be filled is NREGS. SIZE indicates the number
1762 of bytes in the object X. */
1763
1764
1765 void
1766 move_block_from_reg (regno, x, nregs, size)
1767 int regno;
1768 rtx x;
1769 int nregs;
1770 int size;
1771 {
1772 int i;
1773 rtx pat, last;
1774
1775 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1776 to the left before storing to memory. */
1777 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1778 {
1779 rtx tem = operand_subword (x, 0, 1, BLKmode);
1780 rtx shift;
1781
1782 if (tem == 0)
1783 abort ();
1784
1785 shift = expand_shift (LSHIFT_EXPR, word_mode,
1786 gen_rtx (REG, word_mode, regno),
1787 build_int_2 ((UNITS_PER_WORD - size)
1788 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1789 emit_move_insn (tem, shift);
1790 return;
1791 }
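/* Numerical example for the big-endian case above: with 4-byte words,
   storing a 1-byte value shifts it left by (4 - 1) * 8 = 24 bits, so
   the byte ends up in the most significant, i.e. lowest-addressed,
   position of the word.  */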
1792
1793 /* See if the machine can do this with a store multiple insn. */
1794 #ifdef HAVE_store_multiple
1795 if (HAVE_store_multiple)
1796 {
1797 last = get_last_insn ();
1798 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1799 GEN_INT (nregs));
1800 if (pat)
1801 {
1802 emit_insn (pat);
1803 return;
1804 }
1805 else
1806 delete_insns_since (last);
1807 }
1808 #endif
1809
1810 for (i = 0; i < nregs; i++)
1811 {
1812 rtx tem = operand_subword (x, i, 1, BLKmode);
1813
1814 if (tem == 0)
1815 abort ();
1816
1817 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1818 }
1819 }
1820
1821 /* Add a USE expression for REG to the (possibly empty) list pointed
1822 to by CALL_FUSAGE. REG must denote a hard register. */
1823
1824 void
1825 use_reg (call_fusage, reg)
1826 rtx *call_fusage, reg;
1827 {
1828 if (GET_CODE (reg) != REG
1829 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1830 abort ();
1831
1832 *call_fusage
1833 = gen_rtx (EXPR_LIST, VOIDmode,
1834 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1835 }
1836
1837 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1838 starting at REGNO. All of these registers must be hard registers. */
1839
1840 void
1841 use_regs (call_fusage, regno, nregs)
1842 rtx *call_fusage;
1843 int regno;
1844 int nregs;
1845 {
1846 int i;
1847
1848 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1849 abort ();
1850
1851 for (i = 0; i < nregs; i++)
1852 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1853 }
1854 \f
1855 /* Generate several move instructions to clear LEN bytes of block TO.
1856 (A MEM rtx with BLKmode). The caller must pass TO through
1857 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1858 we can assume. */
1859
1860 static void
1861 clear_by_pieces (to, len, align)
1862 rtx to;
1863 int len, align;
1864 {
1865 struct clear_by_pieces data;
1866 rtx to_addr = XEXP (to, 0);
1867 int max_size = MOVE_MAX + 1;
1868
1869 data.offset = 0;
1870 data.to_addr = to_addr;
1871 data.to = to;
1872 data.autinc_to
1873 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1874 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1875
1876 data.explicit_inc_to = 0;
1877 data.reverse
1878 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1879 if (data.reverse) data.offset = len;
1880 data.len = len;
1881
1882 data.to_struct = MEM_IN_STRUCT_P (to);
1883
1884 /* If copying requires more than two move insns,
1885 copy addresses to registers (to make displacements shorter)
1886 and use post-increment if available. */
1887 if (!data.autinc_to
1888 && move_by_pieces_ninsns (len, align) > 2)
1889 {
1890 #ifdef HAVE_PRE_DECREMENT
1891 if (data.reverse && ! data.autinc_to)
1892 {
1893 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1894 data.autinc_to = 1;
1895 data.explicit_inc_to = -1;
1896 }
1897 #endif
1898 #ifdef HAVE_POST_INCREMENT
1899 if (! data.reverse && ! data.autinc_to)
1900 {
1901 data.to_addr = copy_addr_to_reg (to_addr);
1902 data.autinc_to = 1;
1903 data.explicit_inc_to = 1;
1904 }
1905 #endif
1906 if (!data.autinc_to && CONSTANT_P (to_addr))
1907 data.to_addr = copy_addr_to_reg (to_addr);
1908 }
1909
1910 if (! SLOW_UNALIGNED_ACCESS
1911 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1912 align = MOVE_MAX;
1913
1914 /* First move what we can in the largest integer mode, then go to
1915 successively smaller modes. */
1916
1917 while (max_size > 1)
1918 {
1919 enum machine_mode mode = VOIDmode, tmode;
1920 enum insn_code icode;
1921
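/* Find the widest integer mode that is narrower than MAX_SIZE bytes.  */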
1922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1924 if (GET_MODE_SIZE (tmode) < max_size)
1925 mode = tmode;
1926
1927 if (mode == VOIDmode)
1928 break;
1929
1930 icode = mov_optab->handlers[(int) mode].insn_code;
1931 if (icode != CODE_FOR_nothing
1932 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1933 GET_MODE_SIZE (mode)))
1934 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1935
1936 max_size = GET_MODE_SIZE (mode);
1937 }
1938
1939 /* The code above should have handled everything. */
1940 if (data.len != 0)
1941 abort ();
1942 }
1943
1944 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1945 with move instructions for mode MODE. GENFUN is the gen_... function
1946 to make a move insn for that mode. DATA has all the other info. */
1947
1948 static void
1949 clear_by_pieces_1 (genfun, mode, data)
1950 rtx (*genfun) ();
1951 enum machine_mode mode;
1952 struct clear_by_pieces *data;
1953 {
1954 register int size = GET_MODE_SIZE (mode);
1955 register rtx to1;
1956
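/* Clear one MODE-sized chunk at a time while at least that many bytes
   remain to be cleared.  */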
1957 while (data->len >= size)
1958 {
1959 if (data->reverse) data->offset -= size;
1960
1961 to1 = (data->autinc_to
1962 ? gen_rtx (MEM, mode, data->to_addr)
1963 : change_address (data->to, mode,
1964 plus_constant (data->to_addr, data->offset)));
1965 MEM_IN_STRUCT_P (to1) = data->to_struct;
1966
1967 #ifdef HAVE_PRE_DECREMENT
1968 if (data->explicit_inc_to < 0)
1969 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1970 #endif
1971
1972 emit_insn ((*genfun) (to1, const0_rtx));
1973 #ifdef HAVE_POST_INCREMENT
1974 if (data->explicit_inc_to > 0)
1975 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1976 #endif
1977
1978 if (! data->reverse) data->offset += size;
1979
1980 data->len -= size;
1981 }
1982 }
1983 \f
1984 /* Write zeros through the storage of OBJECT.
1985 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
1986 the maximum alignment we can assume it has, measured in bytes. */
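/* As an illustration (not a new entry point), store_constructor below
   clears a mostly-zero aggregate with

     clear_storage (target, expr_size (exp),
                    TYPE_ALIGN (type) / BITS_PER_UNIT);

   before storing the nonzero fields individually.  */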
1987
1988 void
1989 clear_storage (object, size, align)
1990 rtx object;
1991 rtx size;
1992 int align;
1993 {
1994 if (GET_MODE (object) == BLKmode)
1995 {
1996 object = protect_from_queue (object, 1);
1997 size = protect_from_queue (size, 0);
1998
1999 if (GET_CODE (size) == CONST_INT
2000 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2001 clear_by_pieces (object, INTVAL (size), align);
2002
2003 else
2004 {
2005 /* Try the most limited insn first, because there's no point
2006 including more than one in the machine description unless
2007 the more limited one has some advantage. */
2008
2009 rtx opalign = GEN_INT (align);
2010 enum machine_mode mode;
2011
2012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2013 mode = GET_MODE_WIDER_MODE (mode))
2014 {
2015 enum insn_code code = clrstr_optab[(int) mode];
2016
2017 if (code != CODE_FOR_nothing
2018 /* We don't need MODE to be narrower than
2019 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2020 the mode mask, as it is returned by the macro, it will
2021 definitely be less than the actual mode mask. */
2022 && ((GET_CODE (size) == CONST_INT
2023 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2024 <= GET_MODE_MASK (mode)))
2025 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2026 && (insn_operand_predicate[(int) code][0] == 0
2027 || (*insn_operand_predicate[(int) code][0]) (object,
2028 BLKmode))
2029 && (insn_operand_predicate[(int) code][2] == 0
2030 || (*insn_operand_predicate[(int) code][2]) (opalign,
2031 VOIDmode)))
2032 {
2033 rtx op1;
2034 rtx last = get_last_insn ();
2035 rtx pat;
2036
2037 op1 = convert_to_mode (mode, size, 1);
2038 if (insn_operand_predicate[(int) code][1] != 0
2039 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2040 mode))
2041 op1 = copy_to_mode_reg (mode, op1);
2042
2043 pat = GEN_FCN ((int) code) (object, op1, opalign);
2044 if (pat)
2045 {
2046 emit_insn (pat);
2047 return;
2048 }
2049 else
2050 delete_insns_since (last);
2051 }
2052 }
2053
2055 #ifdef TARGET_MEM_FUNCTIONS
2056 emit_library_call (memset_libfunc, 0,
2057 VOIDmode, 3,
2058 XEXP (object, 0), Pmode,
2059 const0_rtx, TYPE_MODE (integer_type_node),
2060 convert_to_mode (TYPE_MODE (sizetype),
2061 size, TREE_UNSIGNED (sizetype)),
2062 TYPE_MODE (sizetype));
2063 #else
2064 emit_library_call (bzero_libfunc, 0,
2065 VOIDmode, 2,
2066 XEXP (object, 0), Pmode,
2067 convert_to_mode (TYPE_MODE (integer_type_node),
2068 size,
2069 TREE_UNSIGNED (integer_type_node)),
2070 TYPE_MODE (integer_type_node));
2071 #endif
2072 }
2073 }
2074 else
2075 emit_move_insn (object, const0_rtx);
2076 }
2077
2078 /* Generate code to copy Y into X.
2079 Both Y and X must have the same mode, except that
2080 Y can be a constant with VOIDmode.
2081 This mode cannot be BLKmode; use emit_block_move for that.
2082
2083 Return the last instruction emitted. */
2084
2085 rtx
2086 emit_move_insn (x, y)
2087 rtx x, y;
2088 {
2089 enum machine_mode mode = GET_MODE (x);
2090
2091 x = protect_from_queue (x, 1);
2092 y = protect_from_queue (y, 0);
2093
2094 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2095 abort ();
2096
2097 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2098 y = force_const_mem (mode, y);
2099
2100 /* If X or Y are memory references, verify that their addresses are valid
2101 for the machine. */
2102 if (GET_CODE (x) == MEM
2103 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2104 && ! push_operand (x, GET_MODE (x)))
2105 || (flag_force_addr
2106 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2107 x = change_address (x, VOIDmode, XEXP (x, 0));
2108
2109 if (GET_CODE (y) == MEM
2110 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2111 || (flag_force_addr
2112 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2113 y = change_address (y, VOIDmode, XEXP (y, 0));
2114
2115 if (mode == BLKmode)
2116 abort ();
2117
2118 return emit_move_insn_1 (x, y);
2119 }
2120
2121 /* Low level part of emit_move_insn.
2122 Called just like emit_move_insn, but assumes X and Y
2123 are basically valid. */
2124
2125 rtx
2126 emit_move_insn_1 (x, y)
2127 rtx x, y;
2128 {
2129 enum machine_mode mode = GET_MODE (x);
2130 enum machine_mode submode;
2131 enum mode_class class = GET_MODE_CLASS (mode);
2132 int i;
2133
2134 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2135 return
2136 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2137
2138 /* Expand complex moves by moving real part and imag part, if possible. */
2139 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2140 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2141 * BITS_PER_UNIT),
2142 (class == MODE_COMPLEX_INT
2143 ? MODE_INT : MODE_FLOAT),
2144 0))
2145 && (mov_optab->handlers[(int) submode].insn_code
2146 != CODE_FOR_nothing))
2147 {
2148 /* Don't split destination if it is a stack push. */
2149 int stack = push_operand (x, GET_MODE (x));
2150 rtx insns;
2151
2152 /* If this is a stack push, push the highpart first, so it
2153 will be in the argument order.
2154
2155 In that case, change_address is used only to convert
2156 the mode, not to change the address. */
2157 if (stack)
2158 {
2159 /* Note that the real part always precedes the imag part in memory
2160 regardless of machine's endianness. */
2161 #ifdef STACK_GROWS_DOWNWARD
2162 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2163 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2164 gen_imagpart (submode, y)));
2165 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2166 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2167 gen_realpart (submode, y)));
2168 #else
2169 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2170 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2171 gen_realpart (submode, y)));
2172 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2173 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2174 gen_imagpart (submode, y)));
2175 #endif
2176 }
2177 else
2178 {
2179 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2180 (gen_realpart (submode, x), gen_realpart (submode, y)));
2181 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2182 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2183 }
2184
2185 return get_last_insn ();
2186 }
2187
2188 /* This will handle any multi-word mode that lacks a move_insn pattern.
2189 However, you will get better code if you define such patterns,
2190 even if they must turn into multiple assembler instructions. */
2191 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2192 {
2193 rtx last_insn = 0;
2194 rtx insns;
2195
2196 #ifdef PUSH_ROUNDING
2197
2198 /* If X is a push on the stack, do the push now and replace
2199 X with a reference to the stack pointer. */
2200 if (push_operand (x, GET_MODE (x)))
2201 {
2202 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2203 x = change_address (x, VOIDmode, stack_pointer_rtx);
2204 }
2205 #endif
2206
2207 /* Show the output dies here. */
2208 if (x != y)
2209 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2210
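/* Move the value one word at a time; emit_move_insn will find a way
   to move each word-sized piece.  */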
2211 for (i = 0;
2212 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2213 i++)
2214 {
2215 rtx xpart = operand_subword (x, i, 1, mode);
2216 rtx ypart = operand_subword (y, i, 1, mode);
2217
2218 /* If we can't get a part of Y, put Y into memory if it is a
2219 constant. Otherwise, force it into a register. If we still
2220 can't get a part of Y, abort. */
2221 if (ypart == 0 && CONSTANT_P (y))
2222 {
2223 y = force_const_mem (mode, y);
2224 ypart = operand_subword (y, i, 1, mode);
2225 }
2226 else if (ypart == 0)
2227 ypart = operand_subword_force (y, i, mode);
2228
2229 if (xpart == 0 || ypart == 0)
2230 abort ();
2231
2232 last_insn = emit_move_insn (xpart, ypart);
2233 }
2234
2235 return last_insn;
2236 }
2237 else
2238 abort ();
2239 }
2240 \f
2241 /* Pushing data onto the stack. */
2242
2243 /* Push a block of length SIZE (perhaps variable)
2244 and return an rtx to address the beginning of the block.
2245 Note that it is not possible for the value returned to be a QUEUED.
2246 The value may be virtual_outgoing_args_rtx.
2247
2248 EXTRA is the number of bytes of padding to push in addition to SIZE.
2249 BELOW nonzero means this padding comes at low addresses;
2250 otherwise, the padding comes at high addresses. */
2251
2252 rtx
2253 push_block (size, extra, below)
2254 rtx size;
2255 int extra, below;
2256 {
2257 register rtx temp;
2258
2259 size = convert_modes (Pmode, ptr_mode, size, 1);
2260 if (CONSTANT_P (size))
2261 anti_adjust_stack (plus_constant (size, extra));
2262 else if (GET_CODE (size) == REG && extra == 0)
2263 anti_adjust_stack (size);
2264 else
2265 {
2266 rtx temp = copy_to_mode_reg (Pmode, size);
2267 if (extra != 0)
2268 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2269 temp, 0, OPTAB_LIB_WIDEN);
2270 anti_adjust_stack (temp);
2271 }
2272
2273 #ifdef STACK_GROWS_DOWNWARD
2274 temp = virtual_outgoing_args_rtx;
2275 if (extra != 0 && below)
2276 temp = plus_constant (temp, extra);
2277 #else
2278 if (GET_CODE (size) == CONST_INT)
2279 temp = plus_constant (virtual_outgoing_args_rtx,
2280 - INTVAL (size) - (below ? 0 : extra));
2281 else if (extra != 0 && !below)
2282 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2283 negate_rtx (Pmode, plus_constant (size, extra)));
2284 else
2285 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2286 negate_rtx (Pmode, size));
2287 #endif
2288
2289 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2290 }
2291
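/* Return an rtx for a push operation: STACK_PUSH_CODE (a pre- or
   post- increment or decrement) applied to the stack pointer.  */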
2292 rtx
2293 gen_push_operand ()
2294 {
2295 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2296 }
2297
2298 /* Generate code to push X onto the stack, assuming it has mode MODE and
2299 type TYPE.
2300 MODE is redundant except when X is a CONST_INT (since they don't
2301 carry mode info).
2302 SIZE is an rtx for the size of data to be copied (in bytes),
2303 needed only if X is BLKmode.
2304
2305 ALIGN (in bytes) is maximum alignment we can assume.
2306
2307 If PARTIAL and REG are both nonzero, then copy that many of the first
2308 words of X into registers starting with REG, and push the rest of X.
2309 The amount of space pushed is decreased by PARTIAL words,
2310 rounded *down* to a multiple of PARM_BOUNDARY.
2311 REG must be a hard register in this case.
2312 If REG is zero but PARTIAL is not, take all other actions for an
2313 argument partially in registers, but do not actually load any
2314 registers.
2315
2316 EXTRA is the amount in bytes of extra space to leave next to this arg.
2317 This is ignored if an argument block has already been allocated.
2318
2319 On a machine that lacks real push insns, ARGS_ADDR is the address of
2320 the bottom of the argument block for this call. We use indexing off there
2321 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2322 argument block has not been preallocated.
2323
2324 ARGS_SO_FAR is the size of args previously pushed for this call. */
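/* As an illustration, the scalar-partly-in-registers case below pushes
   each leftover word of X with a recursive call of the form

     emit_push_insn (operand_subword_force (x, i, mode), word_mode,
                     NULL_TREE, NULL_RTX, align, 0, NULL_RTX, 0,
                     args_addr, offset_rtx);

   where OFFSET_RTX stands, for illustration only, for the rtx that
   gives the word's offset past ARGS_SO_FAR.  */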
2325
2326 void
2327 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2328 args_addr, args_so_far)
2329 register rtx x;
2330 enum machine_mode mode;
2331 tree type;
2332 rtx size;
2333 int align;
2334 int partial;
2335 rtx reg;
2336 int extra;
2337 rtx args_addr;
2338 rtx args_so_far;
2339 {
2340 rtx xinner;
2341 enum direction stack_direction
2342 #ifdef STACK_GROWS_DOWNWARD
2343 = downward;
2344 #else
2345 = upward;
2346 #endif
2347
2348 /* Decide where to pad the argument: `downward' for below,
2349 `upward' for above, or `none' for don't pad it.
2350 Default is below for small data on big-endian machines; else above. */
2351 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2352
2353 /* Invert direction if stack is post-update. */
2354 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2355 if (where_pad != none)
2356 where_pad = (where_pad == downward ? upward : downward);
2357
2358 xinner = x = protect_from_queue (x, 0);
2359
2360 if (mode == BLKmode)
2361 {
2362 /* Copy a block into the stack, entirely or partially. */
2363
2364 register rtx temp;
2365 int used = partial * UNITS_PER_WORD;
2366 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2367 int skip;
2368
2369 if (size == 0)
2370 abort ();
2371
2372 used -= offset;
2373
2374 /* USED is now the # of bytes we need not copy to the stack
2375 because registers will take care of them. */
2376
2377 if (partial != 0)
2378 xinner = change_address (xinner, BLKmode,
2379 plus_constant (XEXP (xinner, 0), used));
2380
2381 /* If the partial register-part of the arg counts in its stack size,
2382 skip the part of stack space corresponding to the registers.
2383 Otherwise, start copying to the beginning of the stack space,
2384 by setting SKIP to 0. */
2385 #ifndef REG_PARM_STACK_SPACE
2386 skip = 0;
2387 #else
2388 skip = used;
2389 #endif
2390
2391 #ifdef PUSH_ROUNDING
2392 /* Do it with several push insns if that doesn't take lots of insns
2393 and if there is no difficulty with push insns that skip bytes
2394 on the stack for alignment purposes. */
2395 if (args_addr == 0
2396 && GET_CODE (size) == CONST_INT
2397 && skip == 0
2398 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2399 < MOVE_RATIO)
2400 /* Here we avoid the case of a structure whose weak alignment
2401 forces many pushes of a small amount of data,
2402 and such small pushes do rounding that causes trouble. */
2403 && ((! SLOW_UNALIGNED_ACCESS)
2404 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2405 || PUSH_ROUNDING (align) == align)
2406 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2407 {
2408 /* Push padding now if padding above and stack grows down,
2409 or if padding below and stack grows up.
2410 But if space already allocated, this has already been done. */
2411 if (extra && args_addr == 0
2412 && where_pad != none && where_pad != stack_direction)
2413 anti_adjust_stack (GEN_INT (extra));
2414
2415 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2416 INTVAL (size) - used, align);
2417 }
2418 else
2419 #endif /* PUSH_ROUNDING */
2420 {
2421 /* Otherwise make space on the stack and copy the data
2422 to the address of that space. */
2423
2424 /* Deduct words put into registers from the size we must copy. */
2425 if (partial != 0)
2426 {
2427 if (GET_CODE (size) == CONST_INT)
2428 size = GEN_INT (INTVAL (size) - used);
2429 else
2430 size = expand_binop (GET_MODE (size), sub_optab, size,
2431 GEN_INT (used), NULL_RTX, 0,
2432 OPTAB_LIB_WIDEN);
2433 }
2434
2435 /* Get the address of the stack space.
2436 In this case, we do not deal with EXTRA separately.
2437 A single stack adjust will do. */
2438 if (! args_addr)
2439 {
2440 temp = push_block (size, extra, where_pad == downward);
2441 extra = 0;
2442 }
2443 else if (GET_CODE (args_so_far) == CONST_INT)
2444 temp = memory_address (BLKmode,
2445 plus_constant (args_addr,
2446 skip + INTVAL (args_so_far)));
2447 else
2448 temp = memory_address (BLKmode,
2449 plus_constant (gen_rtx (PLUS, Pmode,
2450 args_addr, args_so_far),
2451 skip));
2452
2453 /* TEMP is the address of the block. Copy the data there. */
2454 if (GET_CODE (size) == CONST_INT
2455 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2456 < MOVE_RATIO))
2457 {
2458 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2459 INTVAL (size), align);
2460 goto ret;
2461 }
2462 /* Try the most limited insn first, because there's no point
2463 including more than one in the machine description unless
2464 the more limited one has some advantage. */
2465 #ifdef HAVE_movstrqi
2466 if (HAVE_movstrqi
2467 && GET_CODE (size) == CONST_INT
2468 && ((unsigned) INTVAL (size)
2469 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2470 {
2471 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2472 xinner, size, GEN_INT (align));
2473 if (pat != 0)
2474 {
2475 emit_insn (pat);
2476 goto ret;
2477 }
2478 }
2479 #endif
2480 #ifdef HAVE_movstrhi
2481 if (HAVE_movstrhi
2482 && GET_CODE (size) == CONST_INT
2483 && ((unsigned) INTVAL (size)
2484 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2485 {
2486 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2487 xinner, size, GEN_INT (align));
2488 if (pat != 0)
2489 {
2490 emit_insn (pat);
2491 goto ret;
2492 }
2493 }
2494 #endif
2495 #ifdef HAVE_movstrsi
2496 if (HAVE_movstrsi)
2497 {
2498 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2499 xinner, size, GEN_INT (align));
2500 if (pat != 0)
2501 {
2502 emit_insn (pat);
2503 goto ret;
2504 }
2505 }
2506 #endif
2507 #ifdef HAVE_movstrdi
2508 if (HAVE_movstrdi)
2509 {
2510 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2511 xinner, size, GEN_INT (align));
2512 if (pat != 0)
2513 {
2514 emit_insn (pat);
2515 goto ret;
2516 }
2517 }
2518 #endif
2519
2520 #ifndef ACCUMULATE_OUTGOING_ARGS
2521 /* If the source is referenced relative to the stack pointer,
2522 copy it to another register to stabilize it. We do not need
2523 to do this if we know that we won't be changing sp. */
2524
2525 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2526 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2527 temp = copy_to_reg (temp);
2528 #endif
2529
2530 /* Make inhibit_defer_pop nonzero around the library call
2531 to force it to pop the bcopy-arguments right away. */
2532 NO_DEFER_POP;
2533 #ifdef TARGET_MEM_FUNCTIONS
2534 emit_library_call (memcpy_libfunc, 0,
2535 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2536 convert_to_mode (TYPE_MODE (sizetype),
2537 size, TREE_UNSIGNED (sizetype)),
2538 TYPE_MODE (sizetype));
2539 #else
2540 emit_library_call (bcopy_libfunc, 0,
2541 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2542 convert_to_mode (TYPE_MODE (integer_type_node),
2543 size,
2544 TREE_UNSIGNED (integer_type_node)),
2545 TYPE_MODE (integer_type_node));
2546 #endif
2547 OK_DEFER_POP;
2548 }
2549 }
2550 else if (partial > 0)
2551 {
2552 /* Scalar partly in registers. */
2553
2554 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2555 int i;
2556 int not_stack;
2557 /* # words of start of argument
2558 that we must make space for but need not store. */
2559 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2560 int args_offset = INTVAL (args_so_far);
2561 int skip;
2562
2563 /* Push padding now if padding above and stack grows down,
2564 or if padding below and stack grows up.
2565 But if space already allocated, this has already been done. */
2566 if (extra && args_addr == 0
2567 && where_pad != none && where_pad != stack_direction)
2568 anti_adjust_stack (GEN_INT (extra));
2569
2570 /* If we make space by pushing it, we might as well push
2571 the real data. Otherwise, we can leave OFFSET nonzero
2572 and leave the space uninitialized. */
2573 if (args_addr == 0)
2574 offset = 0;
2575
2576 /* Now NOT_STACK gets the number of words that we don't need to
2577 allocate on the stack. */
2578 not_stack = partial - offset;
2579
2580 /* If the partial register-part of the arg counts in its stack size,
2581 skip the part of stack space corresponding to the registers.
2582 Otherwise, start copying to the beginning of the stack space,
2583 by setting SKIP to 0. */
2584 #ifndef REG_PARM_STACK_SPACE
2585 skip = 0;
2586 #else
2587 skip = not_stack;
2588 #endif
2589
2590 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2591 x = validize_mem (force_const_mem (mode, x));
2592
2593 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2594 SUBREGs of such registers are not allowed. */
2595 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2596 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2597 x = copy_to_reg (x);
2598
2599 /* Loop over all the words allocated on the stack for this arg. */
2600 /* We can do it by words, because any scalar bigger than a word
2601 has a size a multiple of a word. */
2602 #ifndef PUSH_ARGS_REVERSED
2603 for (i = not_stack; i < size; i++)
2604 #else
2605 for (i = size - 1; i >= not_stack; i--)
2606 #endif
2607 if (i >= not_stack + offset)
2608 emit_push_insn (operand_subword_force (x, i, mode),
2609 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2610 0, args_addr,
2611 GEN_INT (args_offset + ((i - not_stack + skip)
2612 * UNITS_PER_WORD)));
2613 }
2614 else
2615 {
2616 rtx addr;
2617
2618 /* Push padding now if padding above and stack grows down,
2619 or if padding below and stack grows up.
2620 But if space already allocated, this has already been done. */
2621 if (extra && args_addr == 0
2622 && where_pad != none && where_pad != stack_direction)
2623 anti_adjust_stack (GEN_INT (extra));
2624
2625 #ifdef PUSH_ROUNDING
2626 if (args_addr == 0)
2627 addr = gen_push_operand ();
2628 else
2629 #endif
2630 if (GET_CODE (args_so_far) == CONST_INT)
2631 addr
2632 = memory_address (mode,
2633 plus_constant (args_addr, INTVAL (args_so_far)));
2634 else
2635 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2636 args_so_far));
2637
2638 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2639 }
2640
2641 ret:
2642 /* If part should go in registers, copy that part
2643 into the appropriate registers. Do this now, at the end,
2644 since mem-to-mem copies above may do function calls. */
2645 if (partial > 0 && reg != 0)
2646 move_block_to_reg (REGNO (reg), x, partial, mode);
2647
2648 if (extra && args_addr == 0 && where_pad == stack_direction)
2649 anti_adjust_stack (GEN_INT (extra));
2650 }
2651 \f
2652 /* Expand an assignment that stores the value of FROM into TO.
2653 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2654 (This may contain a QUEUED rtx;
2655 if the value is constant, this rtx is a constant.)
2656 Otherwise, the returned value is NULL_RTX.
2657
2658 SUGGEST_REG is no longer actually used.
2659 It used to mean, copy the value through a register
2660 and return that register, if that is possible.
2661 We now use WANT_VALUE to decide whether to do this. */
2662
2663 rtx
2664 expand_assignment (to, from, want_value, suggest_reg)
2665 tree to, from;
2666 int want_value;
2667 int suggest_reg;
2668 {
2669 register rtx to_rtx = 0;
2670 rtx result;
2671
2672 /* Don't crash if the lhs of the assignment was erroneous. */
2673
2674 if (TREE_CODE (to) == ERROR_MARK)
2675 {
2676 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2677 return want_value ? result : NULL_RTX;
2678 }
2679
2680 if (output_bytecode)
2681 {
2682 tree dest_innermost;
2683
2684 bc_expand_expr (from);
2685 bc_emit_instruction (duplicate);
2686
2687 dest_innermost = bc_expand_address (to);
2688
2689 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2690 take care of it here. */
2691
2692 bc_store_memory (TREE_TYPE (to), dest_innermost);
2693 return NULL;
2694 }
2695
2696 /* Assignment of a structure component needs special treatment
2697 if the structure component's rtx is not simply a MEM.
2698 Assignment of an array element at a constant index, and assignment of
2699 an array element in an unaligned packed structure field, has the same
2700 problem. */
2701
2702 if (TREE_CODE (to) == COMPONENT_REF
2703 || TREE_CODE (to) == BIT_FIELD_REF
2704 || (TREE_CODE (to) == ARRAY_REF
2705 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2706 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2707 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2708 {
2709 enum machine_mode mode1;
2710 int bitsize;
2711 int bitpos;
2712 tree offset;
2713 int unsignedp;
2714 int volatilep = 0;
2715 tree tem;
2716 int alignment;
2717
2718 push_temp_slots ();
2719 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2720 &mode1, &unsignedp, &volatilep);
2721
2722 /* If we are going to use store_bit_field and extract_bit_field,
2723 make sure to_rtx will be safe for multiple use. */
2724
2725 if (mode1 == VOIDmode && want_value)
2726 tem = stabilize_reference (tem);
2727
2728 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2729 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2730 if (offset != 0)
2731 {
2732 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2733
2734 if (GET_CODE (to_rtx) != MEM)
2735 abort ();
2736 to_rtx = change_address (to_rtx, VOIDmode,
2737 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2738 force_reg (ptr_mode, offset_rtx)));
2739 /* If we have a variable offset, the known alignment
2740 is only that of the innermost structure containing the field.
2741 (Actually, we could sometimes do better by using the
2742 align of an element of the innermost array, but no need.) */
2743 if (TREE_CODE (to) == COMPONENT_REF
2744 || TREE_CODE (to) == BIT_FIELD_REF)
2745 alignment
2746 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2747 }
2748 if (volatilep)
2749 {
2750 if (GET_CODE (to_rtx) == MEM)
2751 {
2752 /* When the offset is zero, to_rtx is the address of the
2753 structure we are storing into, and hence may be shared.
2754 We must make a new MEM before setting the volatile bit. */
2755 if (offset == 0)
2756 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2757 MEM_VOLATILE_P (to_rtx) = 1;
2758 }
2759 #if 0 /* This was turned off because, when a field is volatile
2760 in an object which is not volatile, the object may be in a register,
2761 and then we would abort over here. */
2762 else
2763 abort ();
2764 #endif
2765 }
2766
2767 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2768 (want_value
2769 /* Spurious cast makes HPUX compiler happy. */
2770 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2771 : VOIDmode),
2772 unsignedp,
2773 /* Required alignment of containing datum. */
2774 alignment,
2775 int_size_in_bytes (TREE_TYPE (tem)));
2776 preserve_temp_slots (result);
2777 free_temp_slots ();
2778 pop_temp_slots ();
2779
2780 /* If the value is meaningful, convert RESULT to the proper mode.
2781 Otherwise, return nothing. */
2782 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2783 TYPE_MODE (TREE_TYPE (from)),
2784 result,
2785 TREE_UNSIGNED (TREE_TYPE (to)))
2786 : NULL_RTX);
2787 }
2788
2789 /* If the rhs is a function call and its value is not an aggregate,
2790 call the function before we start to compute the lhs.
2791 This is needed for correct code for cases such as
2792 val = setjmp (buf) on machines where reference to val
2793 requires loading up part of an address in a separate insn.
2794
2795 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2796 a promoted variable where the zero- or sign- extension needs to be done.
2797 Handling this in the normal way is safe because no computation is done
2798 before the call. */
2799 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2800 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2801 {
2802 rtx value;
2803
2804 push_temp_slots ();
2805 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2806 if (to_rtx == 0)
2807 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2808
2809 if (GET_MODE (to_rtx) == BLKmode)
2810 emit_block_move (to_rtx, value, expr_size (from),
2811 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2812 else
2813 emit_move_insn (to_rtx, value);
2814 preserve_temp_slots (to_rtx);
2815 free_temp_slots ();
2816 pop_temp_slots ();
2817 return want_value ? to_rtx : NULL_RTX;
2818 }
2819
2820 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2821 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2822
2823 if (to_rtx == 0)
2824 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2825
2826 /* Don't move directly into a return register. */
2827 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2828 {
2829 rtx temp;
2830
2831 push_temp_slots ();
2832 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2833 emit_move_insn (to_rtx, temp);
2834 preserve_temp_slots (to_rtx);
2835 free_temp_slots ();
2836 pop_temp_slots ();
2837 return want_value ? to_rtx : NULL_RTX;
2838 }
2839
2840 /* In case we are returning the contents of an object which overlaps
2841 the place the value is being stored, use a safe function when copying
2842 a value through a pointer into a structure value return block. */
2843 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2844 && current_function_returns_struct
2845 && !current_function_returns_pcc_struct)
2846 {
2847 rtx from_rtx, size;
2848
2849 push_temp_slots ();
2850 size = expr_size (from);
2851 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2852
2853 #ifdef TARGET_MEM_FUNCTIONS
2854 emit_library_call (memcpy_libfunc, 0,
2855 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2856 XEXP (from_rtx, 0), Pmode,
2857 convert_to_mode (TYPE_MODE (sizetype),
2858 size, TREE_UNSIGNED (sizetype)),
2859 TYPE_MODE (sizetype));
2860 #else
2861 emit_library_call (bcopy_libfunc, 0,
2862 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2863 XEXP (to_rtx, 0), Pmode,
2864 convert_to_mode (TYPE_MODE (integer_type_node),
2865 size, TREE_UNSIGNED (integer_type_node)),
2866 TYPE_MODE (integer_type_node));
2867 #endif
2868
2869 preserve_temp_slots (to_rtx);
2870 free_temp_slots ();
2871 pop_temp_slots ();
2872 return want_value ? to_rtx : NULL_RTX;
2873 }
2874
2875 /* Compute FROM and store the value in the rtx we got. */
2876
2877 push_temp_slots ();
2878 result = store_expr (from, to_rtx, want_value);
2879 preserve_temp_slots (result);
2880 free_temp_slots ();
2881 pop_temp_slots ();
2882 return want_value ? result : NULL_RTX;
2883 }
2884
2885 /* Generate code for computing expression EXP,
2886 and storing the value into TARGET.
2887 TARGET may contain a QUEUED rtx.
2888
2889 If WANT_VALUE is nonzero, return a copy of the value
2890 not in TARGET, so that we can be sure to use the proper
2891 value in a containing expression even if TARGET has something
2892 else stored in it. If possible, we copy the value through a pseudo
2893 and return that pseudo. Or, if the value is constant, we try to
2894 return the constant. In some cases, we return a pseudo
2895 copied *from* TARGET.
2896
2897 If the mode is BLKmode then we may return TARGET itself.
2898 It turns out that in BLKmode it doesn't cause a problem,
2899 because C has no operators that could combine two different
2900 assignments into the same BLKmode object with different values
2901 with no sequence point. Will other languages need this to
2902 be more thorough?
2903
2904 If WANT_VALUE is 0, we return NULL, to make sure
2905 to catch quickly any cases where the caller uses the value
2906 and fails to set WANT_VALUE. */
2907
2908 rtx
2909 store_expr (exp, target, want_value)
2910 register tree exp;
2911 register rtx target;
2912 int want_value;
2913 {
2914 register rtx temp;
2915 int dont_return_target = 0;
2916
2917 if (TREE_CODE (exp) == COMPOUND_EXPR)
2918 {
2919 /* Perform first part of compound expression, then assign from second
2920 part. */
2921 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2922 emit_queue ();
2923 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2924 }
2925 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2926 {
2927 /* For conditional expression, get safe form of the target. Then
2928 test the condition, doing the appropriate assignment on either
2929 side. This avoids the creation of unnecessary temporaries.
2930 For non-BLKmode, it is more efficient not to do this. */
2931
2932 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2933
2934 emit_queue ();
2935 target = protect_from_queue (target, 1);
2936
2937 do_pending_stack_adjust ();
2938 NO_DEFER_POP;
2939 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2940 store_expr (TREE_OPERAND (exp, 1), target, 0);
2941 emit_queue ();
2942 emit_jump_insn (gen_jump (lab2));
2943 emit_barrier ();
2944 emit_label (lab1);
2945 store_expr (TREE_OPERAND (exp, 2), target, 0);
2946 emit_queue ();
2947 emit_label (lab2);
2948 OK_DEFER_POP;
2949 return want_value ? target : NULL_RTX;
2950 }
2951 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2952 && GET_MODE (target) != BLKmode)
2953 /* If target is in memory and caller wants value in a register instead,
2954 arrange that. Pass TARGET as target for expand_expr so that,
2955 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2956 We know expand_expr will not use the target in that case.
2957 Don't do this if TARGET is volatile because we are supposed
2958 to write it and then read it. */
2959 {
2960 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2961 GET_MODE (target), 0);
2962 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2963 temp = copy_to_reg (temp);
2964 dont_return_target = 1;
2965 }
2966 else if (queued_subexp_p (target))
2967 /* If target contains a postincrement, let's not risk
2968 using it as the place to generate the rhs. */
2969 {
2970 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2971 {
2972 /* Expand EXP into a new pseudo. */
2973 temp = gen_reg_rtx (GET_MODE (target));
2974 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2975 }
2976 else
2977 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2978
2979 /* If target is volatile, ANSI requires accessing the value
2980 *from* the target, if it is accessed. So make that happen.
2981 In no case return the target itself. */
2982 if (! MEM_VOLATILE_P (target) && want_value)
2983 dont_return_target = 1;
2984 }
2985 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2986 /* If this is a scalar in a register that is stored in a wider mode
2987 than the declared mode, compute the result into its declared mode
2988 and then convert to the wider mode. Our value is the computed
2989 expression. */
2990 {
2991 /* If we don't want a value, we can do the conversion inside EXP,
2992 which will often result in some optimizations. Do the conversion
2993 in two steps: first change the signedness, if needed, then
2994 the extend. */
2995 if (! want_value)
2996 {
2997 if (TREE_UNSIGNED (TREE_TYPE (exp))
2998 != SUBREG_PROMOTED_UNSIGNED_P (target))
2999 exp
3000 = convert
3001 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3002 TREE_TYPE (exp)),
3003 exp);
3004
3005 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3006 SUBREG_PROMOTED_UNSIGNED_P (target)),
3007 exp);
3008 }
3009
3010 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3011
3012 /* If TEMP is a volatile MEM and we want a result value, make
3013 the access now so it gets done only once. Likewise if
3014 it contains TARGET. */
3015 if (GET_CODE (temp) == MEM && want_value
3016 && (MEM_VOLATILE_P (temp)
3017 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3018 temp = copy_to_reg (temp);
3019
3020 /* If TEMP is a VOIDmode constant, use convert_modes to make
3021 sure that we properly convert it. */
3022 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3023 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3024 TYPE_MODE (TREE_TYPE (exp)), temp,
3025 SUBREG_PROMOTED_UNSIGNED_P (target));
3026
3027 convert_move (SUBREG_REG (target), temp,
3028 SUBREG_PROMOTED_UNSIGNED_P (target));
3029 return want_value ? temp : NULL_RTX;
3030 }
3031 else
3032 {
3033 temp = expand_expr (exp, target, GET_MODE (target), 0);
3034 /* Return TARGET if it's a specified hardware register.
3035 If TARGET is a volatile mem ref, either return TARGET
3036 or return a reg copied *from* TARGET; ANSI requires this.
3037
3038 Otherwise, if TEMP is not TARGET, return TEMP
3039 if it is constant (for efficiency),
3040 or if we really want the correct value. */
3041 if (!(target && GET_CODE (target) == REG
3042 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3043 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3044 && temp != target
3045 && (CONSTANT_P (temp) || want_value))
3046 dont_return_target = 1;
3047 }
3048
3049 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3050 the same as that of TARGET, adjust the constant. This is needed, for
3051 example, in case it is a CONST_DOUBLE and we want only a word-sized
3052 value. */
3053 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3054 && TREE_CODE (exp) != ERROR_MARK
3055 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3056 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3057 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3058
3059 /* If value was not generated in the target, store it there.
3060 Convert the value to TARGET's type first if necessary. */
3061
3062 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3063 {
3064 target = protect_from_queue (target, 1);
3065 if (GET_MODE (temp) != GET_MODE (target)
3066 && GET_MODE (temp) != VOIDmode)
3067 {
3068 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3069 if (dont_return_target)
3070 {
3071 /* In this case, we will return TEMP,
3072 so make sure it has the proper mode.
3073 But don't forget to store the value into TARGET. */
3074 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3075 emit_move_insn (target, temp);
3076 }
3077 else
3078 convert_move (target, temp, unsignedp);
3079 }
3080
3081 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3082 {
3083 /* Handle copying a string constant into an array.
3084 The string constant may be shorter than the array.
3085 So copy just the string's actual length, and clear the rest. */
3086 rtx size;
3087 rtx addr;
3088
3089 /* Get the size of the data type of the string,
3090 which is actually the size of the target. */
3091 size = expr_size (exp);
3092 if (GET_CODE (size) == CONST_INT
3093 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3094 emit_block_move (target, temp, size,
3095 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3096 else
3097 {
3098 /* Compute the size of the data to copy from the string. */
3099 tree copy_size
3100 = size_binop (MIN_EXPR,
3101 make_tree (sizetype, size),
3102 convert (sizetype,
3103 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3104 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3105 VOIDmode, 0);
3106 rtx label = 0;
3107
3108 /* Copy that much. */
3109 emit_block_move (target, temp, copy_size_rtx,
3110 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3111
3112 /* Figure out how much is left in TARGET that we have to clear.
3113 Do all calculations in ptr_mode. */
3114
3115 addr = XEXP (target, 0);
3116 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3117
3118 if (GET_CODE (copy_size_rtx) == CONST_INT)
3119 {
3120 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3121 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3122 }
3123 else
3124 {
3125 addr = force_reg (ptr_mode, addr);
3126 addr = expand_binop (ptr_mode, add_optab, addr,
3127 copy_size_rtx, NULL_RTX, 0,
3128 OPTAB_LIB_WIDEN);
3129
3130 size = expand_binop (ptr_mode, sub_optab, size,
3131 copy_size_rtx, NULL_RTX, 0,
3132 OPTAB_LIB_WIDEN);
3133
3134 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3135 GET_MODE (size), 0, 0);
3136 label = gen_label_rtx ();
3137 emit_jump_insn (gen_blt (label));
3138 }
3139
3140 if (size != const0_rtx)
3141 {
3142 #ifdef TARGET_MEM_FUNCTIONS
3143 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3144 addr, Pmode,
3145 const0_rtx, TYPE_MODE (integer_type_node),
3146 convert_to_mode (TYPE_MODE (sizetype),
3147 size,
3148 TREE_UNSIGNED (sizetype)),
3149 TYPE_MODE (sizetype));
3150 #else
3151 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3152 addr, Pmode,
3153 convert_to_mode (TYPE_MODE (integer_type_node),
3154 size,
3155 TREE_UNSIGNED (integer_type_node)),
3156 TYPE_MODE (integer_type_node));
3157 #endif
3158 }
3159
3160 if (label)
3161 emit_label (label);
3162 }
3163 }
3164 else if (GET_MODE (temp) == BLKmode)
3165 emit_block_move (target, temp, expr_size (exp),
3166 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3167 else
3168 emit_move_insn (target, temp);
3169 }
3170
3171 /* If we don't want a value, return NULL_RTX. */
3172 if (! want_value)
3173 return NULL_RTX;
3174
3175 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3176 ??? The latter test doesn't seem to make sense. */
3177 else if (dont_return_target && GET_CODE (temp) != MEM)
3178 return temp;
3179
3180 /* Return TARGET itself if it is a hard register. */
3181 else if (want_value && GET_MODE (target) != BLKmode
3182 && ! (GET_CODE (target) == REG
3183 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3184 return copy_to_reg (target);
3185
3186 else
3187 return target;
3188 }
3189 \f
3190 /* Return 1 if EXP just contains zeros. */
3191
3192 static int
3193 is_zeros_p (exp)
3194 tree exp;
3195 {
3196 tree elt;
3197
3198 switch (TREE_CODE (exp))
3199 {
3200 case CONVERT_EXPR:
3201 case NOP_EXPR:
3202 case NON_LVALUE_EXPR:
3203 return is_zeros_p (TREE_OPERAND (exp, 0));
3204
3205 case INTEGER_CST:
3206 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3207
3208 case COMPLEX_CST:
3209 return
3210 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3211
3212 case REAL_CST:
3213 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3214
3215 case CONSTRUCTOR:
3216 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3217 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3218 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3219 if (! is_zeros_p (TREE_VALUE (elt)))
3220 return 0;
3221
3222 return 1;
3223 }
3224
3225 return 0;
3226 }
3227
3228 /* Return 1 if EXP contains mostly (3/4) zeros. */
3229
3230 static int
3231 mostly_zeros_p (exp)
3232 tree exp;
3233 {
3234 if (TREE_CODE (exp) == CONSTRUCTOR)
3235 {
3236 int elts = 0, zeros = 0;
3237 tree elt = CONSTRUCTOR_ELTS (exp);
3238 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3239 {
3240 /* If there are no ranges of true bits, it is all zero. */
3241 return elt == NULL_TREE;
3242 }
3243 for (; elt; elt = TREE_CHAIN (elt))
3244 {
3245 /* We do not handle the case where the index is a RANGE_EXPR,
3246 so the statistic will be somewhat inaccurate.
3247 We do make a more accurate count in store_constructor itself,
3248 so since this function is only used for nested array elements,
3249 this should be close enough. */
3250 if (mostly_zeros_p (TREE_VALUE (elt)))
3251 zeros++;
3252 elts++;
3253 }
3254
3255 return 4 * zeros >= 3 * elts;
3256 }
3257
3258 return is_zeros_p (exp);
3259 }
3260 \f
3261 /* Helper function for store_constructor.
3262 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3263 TYPE is the type of the CONSTRUCTOR, not the element type.
3264 CLEARED is as for store_constructor. */
3265
3266 static void
3267 store_constructor_field (target, bitsize, bitpos,
3268 mode, exp, type, cleared)
3269 rtx target;
3270 int bitsize, bitpos;
3271 enum machine_mode mode;
3272 tree exp, type;
3273 int cleared;
3274 {
3275 if (TREE_CODE (exp) == CONSTRUCTOR
3276 && (bitpos % BITS_PER_UNIT) == 0)
3277 {
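/* A nested constructor whose position is a whole number of bytes can
   be stored directly at that offset, recursively.  */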
3278 if (bitpos != 0)
3279 target = change_address (target, VOIDmode,
3280 plus_constant (XEXP (target, 0),
3281 bitpos / BITS_PER_UNIT));
3282 store_constructor (exp, target, cleared);
3283 }
3284 else
3285 store_field (target, bitsize, bitpos, mode, exp,
3286 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3287 int_size_in_bytes (type));
3288 }
3289
3290 /* Store the value of constructor EXP into the rtx TARGET.
3291 TARGET is either a REG or a MEM.
3292 CLEARED is true if TARGET is known to have been zero'd. */
3293
3294 static void
3295 store_constructor (exp, target, cleared)
3296 tree exp;
3297 rtx target;
3298 int cleared;
3299 {
3300 tree type = TREE_TYPE (exp);
3301
3302 /* We know our target cannot conflict, since safe_from_p has been called. */
3303 #if 0
3304 /* Don't try copying piece by piece into a hard register
3305 since that is vulnerable to being clobbered by EXP.
3306 Instead, construct in a pseudo register and then copy it all. */
3307 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3308 {
3309 rtx temp = gen_reg_rtx (GET_MODE (target));
3310 store_constructor (exp, temp, 0);
3311 emit_move_insn (target, temp);
3312 return;
3313 }
3314 #endif
3315
3316 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3317 || TREE_CODE (type) == QUAL_UNION_TYPE)
3318 {
3319 register tree elt;
3320
3321 /* Inform later passes that the whole union value is dead. */
3322 if (TREE_CODE (type) == UNION_TYPE
3323 || TREE_CODE (type) == QUAL_UNION_TYPE)
3324 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3325
3326 /* If we are building a static constructor into a register,
3327 set the initial value as zero so we can fold the value into
3328 a constant. But if more than one register is involved,
3329 this probably loses. */
3330 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3331 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3332 {
3333 if (! cleared)
3334 emit_move_insn (target, const0_rtx);
3335
3336 cleared = 1;
3337 }
3338
3339 /* If the constructor has fewer fields than the structure
3340 or if we are initializing the structure to mostly zeros,
3341 clear the whole structure first. */
3342 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3343 != list_length (TYPE_FIELDS (type)))
3344 || mostly_zeros_p (exp))
3345 {
3346 if (! cleared)
3347 clear_storage (target, expr_size (exp),
3348 TYPE_ALIGN (type) / BITS_PER_UNIT);
3349
3350 cleared = 1;
3351 }
3352 else
3353 /* Inform later passes that the old value is dead. */
3354 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3355
3356 /* Store each element of the constructor into
3357 the corresponding field of TARGET. */
3358
3359 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3360 {
3361 register tree field = TREE_PURPOSE (elt);
3362 register enum machine_mode mode;
3363 int bitsize;
3364 int bitpos = 0;
3365 int unsignedp;
3366 tree pos, constant = 0, offset = 0;
3367 rtx to_rtx = target;
3368
3369 /* Just ignore missing fields.
3370 We cleared the whole structure, above,
3371 if any fields are missing. */
3372 if (field == 0)
3373 continue;
3374
3375 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3376 continue;
3377
3378 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3379 unsignedp = TREE_UNSIGNED (field);
3380 mode = DECL_MODE (field);
3381 if (DECL_BIT_FIELD (field))
3382 mode = VOIDmode;
3383
3384 pos = DECL_FIELD_BITPOS (field);
3385 if (TREE_CODE (pos) == INTEGER_CST)
3386 constant = pos;
3387 else if (TREE_CODE (pos) == PLUS_EXPR
3388 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3389 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3390 else
3391 offset = pos;
3392
3393 if (constant)
3394 bitpos = TREE_INT_CST_LOW (constant);
3395
3396 if (offset)
3397 {
3398 rtx offset_rtx;
3399
3400 if (contains_placeholder_p (offset))
3401 offset = build (WITH_RECORD_EXPR, sizetype,
3402 offset, exp);
3403
3404 offset = size_binop (FLOOR_DIV_EXPR, offset,
3405 size_int (BITS_PER_UNIT));
3406
3407 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3408 if (GET_CODE (to_rtx) != MEM)
3409 abort ();
3410
3411 to_rtx
3412 = change_address (to_rtx, VOIDmode,
3413 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3414 force_reg (ptr_mode, offset_rtx)));
3415 }
3416 if (TREE_READONLY (field))
3417 {
3418 if (GET_CODE (to_rtx) == MEM)
3419 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3420 XEXP (to_rtx, 0));
3421 RTX_UNCHANGING_P (to_rtx) = 1;
3422 }
3423
3424 store_constructor_field (to_rtx, bitsize, bitpos,
3425 mode, TREE_VALUE (elt), type, cleared);
3426 }
3427 }
3428 else if (TREE_CODE (type) == ARRAY_TYPE)
3429 {
3430 register tree elt;
3431 register int i;
3432 int need_to_clear;
3433 tree domain = TYPE_DOMAIN (type);
3434 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3435 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3436 tree elttype = TREE_TYPE (type);
3437
3438 /* If the constructor has fewer elements than the array,
3439 clear the whole array first. Similarly if this is a
3440 static constructor of a non-BLKmode object. */
3441 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3442 need_to_clear = 1;
3443 else
3444 {
3445 HOST_WIDE_INT count = 0, zero_count = 0;
3446 need_to_clear = 0;
3447 /* This loop is a more accurate version of the loop in
3448 mostly_zeros_p (it handles RANGE_EXPR in an index).
3449 It is also needed to check for missing elements. */
3450 for (elt = CONSTRUCTOR_ELTS (exp);
3451 elt != NULL_TREE;
3452 elt = TREE_CHAIN (elt))
3453 {
3454 tree index = TREE_PURPOSE (elt);
3455 HOST_WIDE_INT this_node_count;
3456 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3457 {
3458 tree lo_index = TREE_OPERAND (index, 0);
3459 tree hi_index = TREE_OPERAND (index, 1);
3460 if (TREE_CODE (lo_index) != INTEGER_CST
3461 || TREE_CODE (hi_index) != INTEGER_CST)
3462 {
3463 need_to_clear = 1;
3464 break;
3465 }
3466 this_node_count = TREE_INT_CST_LOW (hi_index)
3467 - TREE_INT_CST_LOW (lo_index) + 1;
3468 }
3469 else
3470 this_node_count = 1;
3471 count += this_node_count;
3472 if (mostly_zeros_p (TREE_VALUE (elt)))
3473 zero_count += this_node_count;
3474 }
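/* Clear the whole array first if at least 3/4 of the elements are
   (mostly) zero, mirroring the heuristic in mostly_zeros_p.  */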
3475 if (4 * zero_count >= 3 * count)
3476 need_to_clear = 1;
3477 }
3478 if (need_to_clear)
3479 {
3480 if (! cleared)
3481 clear_storage (target, expr_size (exp),
3482 TYPE_ALIGN (type) / BITS_PER_UNIT);
3483 cleared = 1;
3484 }
3485 else
3486 /* Inform later passes that the old value is dead. */
3487 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3488
3489 /* Store each element of the constructor into
3490 the corresponding element of TARGET, determined
3491 by counting the elements. */
3492 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3493 elt;
3494 elt = TREE_CHAIN (elt), i++)
3495 {
3496 register enum machine_mode mode;
3497 int bitsize;
3498 int bitpos;
3499 int unsignedp;
3500 tree value = TREE_VALUE (elt);
3501 tree index = TREE_PURPOSE (elt);
3502 rtx xtarget = target;
3503
3504 if (cleared && is_zeros_p (value))
3505 continue;
3506
3507 mode = TYPE_MODE (elttype);
3508 bitsize = GET_MODE_BITSIZE (mode);
3509 unsignedp = TREE_UNSIGNED (elttype);
3510
3511 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3512 {
3513 tree lo_index = TREE_OPERAND (index, 0);
3514 tree hi_index = TREE_OPERAND (index, 1);
3515 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3516 struct nesting *loop;
3517 HOST_WIDE_INT lo, hi, count;
3518 tree position;
3519
3520 /* If the range is constant and "small", unroll the loop. */
3521 if (TREE_CODE (lo_index) == INTEGER_CST
3522 && TREE_CODE (hi_index) == INTEGER_CST
3523 && (lo = TREE_INT_CST_LOW (lo_index),
3524 hi = TREE_INT_CST_LOW (hi_index),
3525 count = hi - lo + 1,
3526 (GET_CODE (target) != MEM
3527 || count <= 2
3528 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3529 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3530 <= 40 * 8))))
3531 {
3532 lo -= minelt; hi -= minelt;
3533 for (; lo <= hi; lo++)
3534 {
3535 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3536 store_constructor_field (target, bitsize, bitpos,
3537 mode, value, type, cleared);
3538 }
3539 }
3540 else
3541 {
3542 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3543 loop_top = gen_label_rtx ();
3544 loop_end = gen_label_rtx ();
3545
3546 unsignedp = TREE_UNSIGNED (domain);
3547
3548 index = build_decl (VAR_DECL, NULL_TREE, domain);
3549
3550 DECL_RTL (index) = index_r
3551 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3552 &unsignedp, 0));
3553
3554 if (TREE_CODE (value) == SAVE_EXPR
3555 && SAVE_EXPR_RTL (value) == 0)
3556 {
3557 /* Make sure value gets expanded once before the loop. */
3558 expand_expr (value, const0_rtx, VOIDmode, 0);
3559 emit_queue ();
3560 }
3561 store_expr (lo_index, index_r, 0);
3562 loop = expand_start_loop (0);
3563
3564 /* Assign value to element index. */
3565 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3566 size_int (BITS_PER_UNIT));
3567 position = size_binop (MULT_EXPR,
3568 size_binop (MINUS_EXPR, index,
3569 TYPE_MIN_VALUE (domain)),
3570 position);
3571 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3572 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3573 xtarget = change_address (target, mode, addr);
3574 if (TREE_CODE (value) == CONSTRUCTOR)
3575 store_constructor (value, xtarget, cleared);
3576 else
3577 store_expr (value, xtarget, 0);
3578
3579 expand_exit_loop_if_false (loop,
3580 build (LT_EXPR, integer_type_node,
3581 index, hi_index));
3582
3583 expand_increment (build (PREINCREMENT_EXPR,
3584 TREE_TYPE (index),
3585 index, integer_one_node), 0);
3586 expand_end_loop ();
3587 emit_label (loop_end);
3588
3589 /* Needed by stupid register allocation, to extend the
3590 lifetime of pseudo-regs used by target past the end
3591 of the loop. */
3592 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3593 }
3594 }
3595 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3596 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3597 {
3598 rtx pos_rtx, addr;
3599 tree position;
3600
3601 if (index == 0)
3602 index = size_int (i);
3603
3604 if (minelt)
3605 index = size_binop (MINUS_EXPR, index,
3606 TYPE_MIN_VALUE (domain));
3607 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3608 size_int (BITS_PER_UNIT));
3609 position = size_binop (MULT_EXPR, index, position);
3610 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3611 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3612 xtarget = change_address (target, mode, addr);
3613 store_expr (value, xtarget, 0);
3614 }
3615 else
3616 {
3617 if (index != 0)
3618 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3619 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3620 else
3621 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3622 store_constructor_field (target, bitsize, bitpos,
3623 mode, value, type, cleared);
3624 }
3625 }
3626 }
3627 /* set constructor assignments */
3628 else if (TREE_CODE (type) == SET_TYPE)
3629 {
3630 tree elt = CONSTRUCTOR_ELTS (exp);
3631 rtx xtarget = XEXP (target, 0);
3632 int set_word_size = TYPE_ALIGN (type);
3633 int nbytes = int_size_in_bytes (type), nbits;
3634 tree domain = TYPE_DOMAIN (type);
3635 tree domain_min, domain_max, bitlength;
3636
3637 /* The default implementation strategy is to extract the constant
3638 parts of the constructor, use that to initialize the target,
3639 and then "or" in whatever non-constant ranges we need in addition.
3640
3641 If a large set is all zero or all ones, it is
3642 probably better to set it using memset (if available) or bzero.
3643 Also, if a large set has just a single range, it may also be
3644 better to first clear the whole set (using
3645 bzero/memset), and then set the bits we want. */
3646
3647 /* Check for all zeros. */
3648 if (elt == NULL_TREE)
3649 {
3650 if (!cleared)
3651 clear_storage (target, expr_size (exp),
3652 TYPE_ALIGN (type) / BITS_PER_UNIT);
3653 return;
3654 }
3655
3656 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3657 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3658 bitlength = size_binop (PLUS_EXPR,
3659 size_binop (MINUS_EXPR, domain_max, domain_min),
3660 size_one_node);
3661
3662 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3663 abort ();
3664 nbits = TREE_INT_CST_LOW (bitlength);
3665
3666 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3667 are "complicated" (more than one range), initialize (the
3668 constant parts) by copying from a constant. */
3669 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3670 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3671 {
3672 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3673 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3674 char *bit_buffer = (char*) alloca (nbits);
3675 HOST_WIDE_INT word = 0;
3676 int bit_pos = 0;
3677 int ibit = 0;
3678 int offset = 0; /* In bytes from beginning of set. */
3679 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3680 for (;;)
3681 {
3682 if (bit_buffer[ibit])
3683 {
3684 if (BYTES_BIG_ENDIAN)
3685 word |= (1 << (set_word_size - 1 - bit_pos));
3686 else
3687 word |= 1 << bit_pos;
3688 }
3689 bit_pos++; ibit++;
3690 if (bit_pos >= set_word_size || ibit == nbits)
3691 {
3692 if (word != 0 || ! cleared)
3693 {
3694 rtx datum = GEN_INT (word);
3695 rtx to_rtx;
3696 /* The assumption here is that it is safe to use XEXP if
3697 the set is multi-word, but not if it's single-word. */
3698 if (GET_CODE (target) == MEM)
3699 {
3700 to_rtx = plus_constant (XEXP (target, 0), offset);
3701 to_rtx = change_address (target, mode, to_rtx);
3702 }
3703 else if (offset == 0)
3704 to_rtx = target;
3705 else
3706 abort ();
3707 emit_move_insn (to_rtx, datum);
3708 }
3709 if (ibit == nbits)
3710 break;
3711 word = 0;
3712 bit_pos = 0;
3713 offset += set_word_size / BITS_PER_UNIT;
3714 }
3715 }
3716 }
3717 else if (!cleared)
3718 {
3719 /* Don't bother clearing storage if the set is all ones. */
3720 if (TREE_CHAIN (elt) != NULL_TREE
3721 || (TREE_PURPOSE (elt) == NULL_TREE
3722 ? nbits != 1
3723 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3724 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3725 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3726 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3727 != nbits))))
3728 clear_storage (target, expr_size (exp),
3729 TYPE_ALIGN (type) / BITS_PER_UNIT);
3730 }
3731
3732 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3733 {
3734 /* start of range of element or NULL */
3735 tree startbit = TREE_PURPOSE (elt);
3736 /* end of range of element, or element value */
3737 tree endbit = TREE_VALUE (elt);
3738 HOST_WIDE_INT startb, endb;
3739 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3740
3741 bitlength_rtx = expand_expr (bitlength,
3742 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3743
3744 /* handle non-range tuple element like [ expr ] */
3745 if (startbit == NULL_TREE)
3746 {
3747 startbit = save_expr (endbit);
3748 endbit = startbit;
3749 }
3750 startbit = convert (sizetype, startbit);
3751 endbit = convert (sizetype, endbit);
3752 if (! integer_zerop (domain_min))
3753 {
3754 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3755 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3756 }
3757 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3758 EXPAND_CONST_ADDRESS);
3759 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3760 EXPAND_CONST_ADDRESS);
3761
3762 if (REG_P (target))
3763 {
3764 targetx = assign_stack_temp (GET_MODE (target),
3765 GET_MODE_SIZE (GET_MODE (target)),
3766 0);
3767 emit_move_insn (targetx, target);
3768 }
3769 else if (GET_CODE (target) == MEM)
3770 targetx = target;
3771 else
3772 abort ();
3773
3774 #ifdef TARGET_MEM_FUNCTIONS
3775 /* Optimization: If startbit and endbit + 1 are
3776 constants divisible by BITS_PER_UNIT,
3777 call memset instead. */
3778 if (TREE_CODE (startbit) == INTEGER_CST
3779 && TREE_CODE (endbit) == INTEGER_CST
3780 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3781 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3782 {
3783 emit_library_call (memset_libfunc, 0,
3784 VOIDmode, 3,
3785 plus_constant (XEXP (targetx, 0),
3786 startb / BITS_PER_UNIT),
3787 Pmode,
3788 constm1_rtx, TYPE_MODE (integer_type_node),
3789 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3790 TYPE_MODE (sizetype));
3791 }
3792 else
3793 #endif
3794 {
3795 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3796 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3797 bitlength_rtx, TYPE_MODE (sizetype),
3798 startbit_rtx, TYPE_MODE (sizetype),
3799 endbit_rtx, TYPE_MODE (sizetype));
3800 }
3801 if (REG_P (target))
3802 emit_move_insn (target, targetx);
3803 }
3804 }
3805
3806 else
3807 abort ();
3808 }
3809
3810 /* Store the value of EXP (an expression tree)
3811 into a subfield of TARGET which has mode MODE and occupies
3812 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3813 If MODE is VOIDmode, it means that we are storing into a bit-field.
3814
3815 If VALUE_MODE is VOIDmode, return nothing in particular.
3816 UNSIGNEDP is not used in this case.
3817
3818 Otherwise, return an rtx for the value stored. This rtx
3819 has mode VALUE_MODE if that is convenient to do.
3820 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3821
3822 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3823 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
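/* For example (hypothetical source), given

       struct s { unsigned f : 3; unsigned g : 5; } v;
       v.g = n;

   on a typical target that allocates bit-fields from the low end,
   store_field is called with BITSIZE == 5, BITPOS == 3 and MODE ==
   VOIDmode, selecting the bit-field path below. */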
3824
3825 static rtx
3826 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3827 unsignedp, align, total_size)
3828 rtx target;
3829 int bitsize, bitpos;
3830 enum machine_mode mode;
3831 tree exp;
3832 enum machine_mode value_mode;
3833 int unsignedp;
3834 int align;
3835 int total_size;
3836 {
3837 HOST_WIDE_INT width_mask = 0;
3838
3839 if (bitsize < HOST_BITS_PER_WIDE_INT)
3840 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3841
3842 /* If we are storing into an unaligned field of an aligned union that is
3843 in a register, we may have the mode of TARGET being an integer mode but
3844 MODE == BLKmode. In that case, get an aligned object whose size and
3845 alignment are the same as TARGET and store TARGET into it (we can avoid
3846 the store if the field being stored is the entire width of TARGET). Then
3847 call ourselves recursively to store the field into a BLKmode version of
3848 that object. Finally, load from the object into TARGET. This is not
3849 very efficient in general, but should only be slightly more expensive
3850 than the otherwise-required unaligned accesses. Perhaps this can be
3851 cleaned up later. */
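/* For instance (hypothetical source), given

       union u { int i; struct { char c[3]; } s; } x;

   where x has been assigned an SImode register, a store into x.s is a
   BLKmode store into a REG and takes the detour described above. */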
3852
3853 if (mode == BLKmode
3854 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3855 {
3856 rtx object = assign_stack_temp (GET_MODE (target),
3857 GET_MODE_SIZE (GET_MODE (target)), 0);
3858 rtx blk_object = copy_rtx (object);
3859
3860 MEM_IN_STRUCT_P (object) = 1;
3861 MEM_IN_STRUCT_P (blk_object) = 1;
3862 PUT_MODE (blk_object, BLKmode);
3863
3864 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3865 emit_move_insn (object, target);
3866
3867 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3868 align, total_size);
3869
3870 /* Even though we aren't returning target, we need to
3871 give it the updated value. */
3872 emit_move_insn (target, object);
3873
3874 return blk_object;
3875 }
3876
3877 /* If the structure is in a register or if the component
3878 is a bit field, we cannot use addressing to access it.
3879 Use bit-field techniques or SUBREG to store in it. */
3880
3881 if (mode == VOIDmode
3882 || (mode != BLKmode && ! direct_store[(int) mode])
3883 || GET_CODE (target) == REG
3884 || GET_CODE (target) == SUBREG
3885 /* If the field isn't aligned enough to store as an ordinary memref,
3886 store it as a bit field. */
3887 || (SLOW_UNALIGNED_ACCESS
3888 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3889 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3890 {
3891 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3892
3893 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3894 MODE. */
3895 if (mode != VOIDmode && mode != BLKmode
3896 && mode != TYPE_MODE (TREE_TYPE (exp)))
3897 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3898
3899 /* Store the value in the bitfield. */
3900 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3901 if (value_mode != VOIDmode)
3902 {
3903 /* The caller wants an rtx for the value. */
3904 /* If possible, avoid refetching from the bitfield itself. */
3905 if (width_mask != 0
3906 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3907 {
3908 tree count;
3909 enum machine_mode tmode;
3910
3911 if (unsignedp)
3912 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3913 tmode = GET_MODE (temp);
3914 if (tmode == VOIDmode)
3915 tmode = value_mode;
3916 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3917 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3918 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3919 }
3920 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3921 NULL_RTX, value_mode, 0, align,
3922 total_size);
3923 }
3924 return const0_rtx;
3925 }
3926 else
3927 {
3928 rtx addr = XEXP (target, 0);
3929 rtx to_rtx;
3930
3931 /* If a value is wanted, it must be the lhs;
3932 so make the address stable for multiple use. */
3933
3934 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3935 && ! CONSTANT_ADDRESS_P (addr)
3936 /* A frame-pointer reference is already stable. */
3937 && ! (GET_CODE (addr) == PLUS
3938 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3939 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3940 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3941 addr = copy_to_reg (addr);
3942
3943 /* Now build a reference to just the desired component. */
3944
3945 to_rtx = change_address (target, mode,
3946 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3947 MEM_IN_STRUCT_P (to_rtx) = 1;
3948
3949 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3950 }
3951 }
3952 \f
3953 /* Return true if any object containing the innermost array is an unaligned
3954 packed structure field. */
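/* E.g. (hypothetical source), given

       struct p { char c; int a[4]; } __attribute__ ((packed)) x;

   a reference such as x.a[i] has its innermost array inside a
   container less aligned than the array's element type, so this
   returns 1. */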
3955
3956 static int
3957 get_inner_unaligned_p (exp)
3958 tree exp;
3959 {
3960 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3961
3962 while (1)
3963 {
3964 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3965 {
3966 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3967 < needed_alignment)
3968 return 1;
3969 }
3970 else if (TREE_CODE (exp) != ARRAY_REF
3971 && TREE_CODE (exp) != NON_LVALUE_EXPR
3972 && ! ((TREE_CODE (exp) == NOP_EXPR
3973 || TREE_CODE (exp) == CONVERT_EXPR)
3974 && (TYPE_MODE (TREE_TYPE (exp))
3975 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3976 break;
3977
3978 exp = TREE_OPERAND (exp, 0);
3979 }
3980
3981 return 0;
3982 }
3983
3984 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3985 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3986 ARRAY_REFs and find the ultimate containing object, which we return.
3987
3988 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3989 bit position, and *PUNSIGNEDP to the signedness of the field.
3990 If the position of the field is variable, we store a tree
3991 giving the variable offset (in units) in *POFFSET.
3992 This offset is in addition to the bit position.
3993 If the position is not variable, we store 0 in *POFFSET.
3994
3995 If any of the extraction expressions is volatile,
3996 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3997
3998 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3999 is a mode that can be used to access the field. In that case, *PBITSIZE
4000 is redundant.
4001
4002 If the field describes a variable-sized object, *PMODE is set to
4003 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4004 this case, but the address of the object can be found. */
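/* Usage sketch (hypothetical caller, mirroring the call made for
   component references in expand_expr below):

       int bitsize, bitpos, unsignedp;
       int volatilep = 0;
       tree offset;
       enum machine_mode mode1;
       tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep);

   For a reference like s.f[2].g with constant layout, TEM is the
   VAR_DECL for s, *PBITPOS is the bit offset of f[2].g within s, and
   *POFFSET is 0. */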
4005
4006 tree
4007 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4008 punsignedp, pvolatilep)
4009 tree exp;
4010 int *pbitsize;
4011 int *pbitpos;
4012 tree *poffset;
4013 enum machine_mode *pmode;
4014 int *punsignedp;
4015 int *pvolatilep;
4016 {
4017 tree orig_exp = exp;
4018 tree size_tree = 0;
4019 enum machine_mode mode = VOIDmode;
4020 tree offset = integer_zero_node;
4021
4022 if (TREE_CODE (exp) == COMPONENT_REF)
4023 {
4024 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4025 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4026 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4027 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4028 }
4029 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4030 {
4031 size_tree = TREE_OPERAND (exp, 1);
4032 *punsignedp = TREE_UNSIGNED (exp);
4033 }
4034 else
4035 {
4036 mode = TYPE_MODE (TREE_TYPE (exp));
4037 *pbitsize = GET_MODE_BITSIZE (mode);
4038 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4039 }
4040
4041 if (size_tree)
4042 {
4043 if (TREE_CODE (size_tree) != INTEGER_CST)
4044 mode = BLKmode, *pbitsize = -1;
4045 else
4046 *pbitsize = TREE_INT_CST_LOW (size_tree);
4047 }
4048
4049 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4050 and find the ultimate containing object. */
4051
4052 *pbitpos = 0;
4053
4054 while (1)
4055 {
4056 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4057 {
4058 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4059 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4060 : TREE_OPERAND (exp, 2));
4061 tree constant = integer_zero_node, var = pos;
4062
4063 /* If this field hasn't been filled in yet, don't go
4064 past it. This should only happen when folding expressions
4065 made during type construction. */
4066 if (pos == 0)
4067 break;
4068
4069 /* Assume here that the offset is a multiple of a unit.
4070 If not, there should be an explicitly added constant. */
4071 if (TREE_CODE (pos) == PLUS_EXPR
4072 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4073 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4074 else if (TREE_CODE (pos) == INTEGER_CST)
4075 constant = pos, var = integer_zero_node;
4076
4077 *pbitpos += TREE_INT_CST_LOW (constant);
4078
4079 if (var)
4080 offset = size_binop (PLUS_EXPR, offset,
4081 size_binop (EXACT_DIV_EXPR, var,
4082 size_int (BITS_PER_UNIT)));
4083 }
4084
4085 else if (TREE_CODE (exp) == ARRAY_REF)
4086 {
4087 /* This code is based on the code in case ARRAY_REF in expand_expr
4088 below. We assume here that the size of an array element is
4089 always an integral multiple of BITS_PER_UNIT. */
4090
4091 tree index = TREE_OPERAND (exp, 1);
4092 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4093 tree low_bound
4094 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4095 tree index_type = TREE_TYPE (index);
4096
4097 if (! integer_zerop (low_bound))
4098 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4099
4100 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4101 {
4102 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4103 index);
4104 index_type = TREE_TYPE (index);
4105 }
4106
4107 index = fold (build (MULT_EXPR, index_type, index,
4108 TYPE_SIZE (TREE_TYPE (exp))));
4109
4110 if (TREE_CODE (index) == INTEGER_CST
4111 && TREE_INT_CST_HIGH (index) == 0)
4112 *pbitpos += TREE_INT_CST_LOW (index);
4113 else
4114 offset = size_binop (PLUS_EXPR, offset,
4115 size_binop (FLOOR_DIV_EXPR, index,
4116 size_int (BITS_PER_UNIT)));
4117 }
4118 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4119 && ! ((TREE_CODE (exp) == NOP_EXPR
4120 || TREE_CODE (exp) == CONVERT_EXPR)
4121 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4122 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4123 != UNION_TYPE))
4124 && (TYPE_MODE (TREE_TYPE (exp))
4125 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4126 break;
4127
4128 /* If any reference in the chain is volatile, the effect is volatile. */
4129 if (TREE_THIS_VOLATILE (exp))
4130 *pvolatilep = 1;
4131 exp = TREE_OPERAND (exp, 0);
4132 }
4133
4134 /* If this was a bit-field, see if there is a mode that allows direct
4135 access in case EXP is in memory. */
4136 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
4137 {
4138 mode = mode_for_size (*pbitsize, MODE_INT, 0);
4139 if (mode == BLKmode)
4140 mode = VOIDmode;
4141 }
4142
4143 if (integer_zerop (offset))
4144 offset = 0;
4145
4146 if (offset != 0 && contains_placeholder_p (offset))
4147 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4148
4149 *pmode = mode;
4150 *poffset = offset;
4151 return exp;
4152 }
4153 \f
4154 /* Given an rtx VALUE that may contain additions and multiplications,
4155 return an equivalent value that just refers to a register or memory.
4156 This is done by generating instructions to perform the arithmetic
4157 and returning a pseudo-register containing the value.
4158
4159 The returned value may be a REG, SUBREG, MEM or constant. */
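/* E.g., applied to (plus:SI (mult:SI (reg) (const_int 4)) (reg)),
   this emits a multiply and an add and returns the pseudo holding the
   sum. A sketch of a call (hypothetical operands X and Y):

       rtx sum = force_operand (gen_rtx (PLUS, SImode, x, y), NULL_RTX);
*/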
4160
4161 rtx
4162 force_operand (value, target)
4163 rtx value, target;
4164 {
4165 register optab binoptab = 0;
4166 /* Use a temporary to force order of execution of calls to
4167 `force_operand'. */
4168 rtx tmp;
4169 register rtx op2;
4170 /* Use subtarget as the target for operand 0 of a binary operation. */
4171 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4172
4173 if (GET_CODE (value) == PLUS)
4174 binoptab = add_optab;
4175 else if (GET_CODE (value) == MINUS)
4176 binoptab = sub_optab;
4177 else if (GET_CODE (value) == MULT)
4178 {
4179 op2 = XEXP (value, 1);
4180 if (!CONSTANT_P (op2)
4181 && !(GET_CODE (op2) == REG && op2 != subtarget))
4182 subtarget = 0;
4183 tmp = force_operand (XEXP (value, 0), subtarget);
4184 return expand_mult (GET_MODE (value), tmp,
4185 force_operand (op2, NULL_RTX),
4186 target, 0);
4187 }
4188
4189 if (binoptab)
4190 {
4191 op2 = XEXP (value, 1);
4192 if (!CONSTANT_P (op2)
4193 && !(GET_CODE (op2) == REG && op2 != subtarget))
4194 subtarget = 0;
4195 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4196 {
4197 binoptab = add_optab;
4198 op2 = negate_rtx (GET_MODE (value), op2);
4199 }
4200
4201 /* Check for an addition with OP2 a constant integer and our first
4202 operand a PLUS of a virtual register and something else. In that
4203 case, we want to emit the sum of the virtual register and the
4204 constant first and then add the other value. This allows virtual
4205 register instantiation to simply modify the constant rather than
4206 creating another one around this addition. */
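/* E.g. (illustrative), for
       (plus (plus (reg virtual-stack-vars) (reg N)) (const_int 8))
   we first emit virtual-stack-vars + 8 and then add (reg N), so that
   instantiation can fold the 8 into the frame offset. */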
4207 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4208 && GET_CODE (XEXP (value, 0)) == PLUS
4209 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4210 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4211 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4212 {
4213 rtx temp = expand_binop (GET_MODE (value), binoptab,
4214 XEXP (XEXP (value, 0), 0), op2,
4215 subtarget, 0, OPTAB_LIB_WIDEN);
4216 return expand_binop (GET_MODE (value), binoptab, temp,
4217 force_operand (XEXP (XEXP (value, 0), 1), 0),
4218 target, 0, OPTAB_LIB_WIDEN);
4219 }
4220
4221 tmp = force_operand (XEXP (value, 0), subtarget);
4222 return expand_binop (GET_MODE (value), binoptab, tmp,
4223 force_operand (op2, NULL_RTX),
4224 target, 0, OPTAB_LIB_WIDEN);
4225 /* We give UNSIGNEDP = 0 to expand_binop
4226 because the only operations we are expanding here are signed ones. */
4227 }
4228 return value;
4229 }
4230 \f
4231 /* Subroutine of expand_expr:
4232 save the non-copied parts (LIST) of an expr (LHS), and return a list
4233 which can restore these values to their previous values,
4234 should something modify their storage. */
4235
4236 static tree
4237 save_noncopied_parts (lhs, list)
4238 tree lhs;
4239 tree list;
4240 {
4241 tree tail;
4242 tree parts = 0;
4243
4244 for (tail = list; tail; tail = TREE_CHAIN (tail))
4245 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4246 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4247 else
4248 {
4249 tree part = TREE_VALUE (tail);
4250 tree part_type = TREE_TYPE (part);
4251 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4252 rtx target = assign_temp (part_type, 0, 1, 1);
4253 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4254 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4255 parts = tree_cons (to_be_saved,
4256 build (RTL_EXPR, part_type, NULL_TREE,
4257 (tree) target),
4258 parts);
4259 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4260 }
4261 return parts;
4262 }
4263
4264 /* Subroutine of expand_expr:
4265 record the non-copied parts (LIST) of an expr (LHS), and return a list
4266 which specifies the initial values of these parts. */
4267
4268 static tree
4269 init_noncopied_parts (lhs, list)
4270 tree lhs;
4271 tree list;
4272 {
4273 tree tail;
4274 tree parts = 0;
4275
4276 for (tail = list; tail; tail = TREE_CHAIN (tail))
4277 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4278 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4279 else
4280 {
4281 tree part = TREE_VALUE (tail);
4282 tree part_type = TREE_TYPE (part);
4283 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4284 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4285 }
4286 return parts;
4287 }
4288
4289 /* Subroutine of expand_expr: return nonzero iff there is no way that
4290 EXP can reference X, which is being modified. */
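/* For instance, if X is the pseudo register holding `a', then
   safe_from_p (X, <tree for `b + 1'>) is 1, while
   safe_from_p (X, <tree for `a'>) is 0, since that decl's DECL_RTL is
   X itself; the caller must then evaluate the expression before
   clobbering X. */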
4291
4292 static int
4293 safe_from_p (x, exp)
4294 rtx x;
4295 tree exp;
4296 {
4297 rtx exp_rtl = 0;
4298 int i, nops;
4299
4300 if (x == 0
4301 /* If EXP has varying size, we MUST use a target since we currently
4302 have no way of allocating temporaries of variable size. So we
4303 assume here that something at a higher level has prevented a
4304 clash. This is somewhat bogus, but the best we can do. Only
4305 do this when X is BLKmode. */
4306 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4307 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4308 && GET_MODE (x) == BLKmode))
4309 return 1;
4310
4311 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4312 find the underlying pseudo. */
4313 if (GET_CODE (x) == SUBREG)
4314 {
4315 x = SUBREG_REG (x);
4316 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4317 return 0;
4318 }
4319
4320 /* If X is a location in the outgoing argument area, it is always safe. */
4321 if (GET_CODE (x) == MEM
4322 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4323 || (GET_CODE (XEXP (x, 0)) == PLUS
4324 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4325 return 1;
4326
4327 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4328 {
4329 case 'd':
4330 exp_rtl = DECL_RTL (exp);
4331 break;
4332
4333 case 'c':
4334 return 1;
4335
4336 case 'x':
4337 if (TREE_CODE (exp) == TREE_LIST)
4338 return ((TREE_VALUE (exp) == 0
4339 || safe_from_p (x, TREE_VALUE (exp)))
4340 && (TREE_CHAIN (exp) == 0
4341 || safe_from_p (x, TREE_CHAIN (exp))));
4342 else
4343 return 0;
4344
4345 case '1':
4346 return safe_from_p (x, TREE_OPERAND (exp, 0));
4347
4348 case '2':
4349 case '<':
4350 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4351 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4352
4353 case 'e':
4354 case 'r':
4355 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4356 the expression. If it is set, we conflict iff we are that rtx or
4357 both are in memory. Otherwise, we check all operands of the
4358 expression recursively. */
4359
4360 switch (TREE_CODE (exp))
4361 {
4362 case ADDR_EXPR:
4363 return (staticp (TREE_OPERAND (exp, 0))
4364 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4365
4366 case INDIRECT_REF:
4367 if (GET_CODE (x) == MEM)
4368 return 0;
4369 break;
4370
4371 case CALL_EXPR:
4372 exp_rtl = CALL_EXPR_RTL (exp);
4373 if (exp_rtl == 0)
4374 {
4375 /* Assume that the call will clobber all hard registers and
4376 all of memory. */
4377 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4378 || GET_CODE (x) == MEM)
4379 return 0;
4380 }
4381
4382 break;
4383
4384 case RTL_EXPR:
4385 /* If a sequence exists, we would have to scan every instruction
4386 in the sequence to see if it was safe. This is probably not
4387 worthwhile. */
4388 if (RTL_EXPR_SEQUENCE (exp))
4389 return 0;
4390
4391 exp_rtl = RTL_EXPR_RTL (exp);
4392 break;
4393
4394 case WITH_CLEANUP_EXPR:
4395 exp_rtl = RTL_EXPR_RTL (exp);
4396 break;
4397
4398 case CLEANUP_POINT_EXPR:
4399 return safe_from_p (x, TREE_OPERAND (exp, 0));
4400
4401 case SAVE_EXPR:
4402 exp_rtl = SAVE_EXPR_RTL (exp);
4403 break;
4404
4405 case BIND_EXPR:
4406 /* The only operand we look at is operand 1. The rest aren't
4407 part of the expression. */
4408 return safe_from_p (x, TREE_OPERAND (exp, 1));
4409
4410 case METHOD_CALL_EXPR:
4411 /* This takes an rtx argument, but shouldn't appear here. */
4412 abort ();
4413 }
4414
4415 /* If we have an rtx, we do not need to scan our operands. */
4416 if (exp_rtl)
4417 break;
4418
4419 nops = tree_code_length[(int) TREE_CODE (exp)];
4420 for (i = 0; i < nops; i++)
4421 if (TREE_OPERAND (exp, i) != 0
4422 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4423 return 0;
4424 }
4425
4426 /* If we have an rtl, find any enclosed object. Then see if we conflict
4427 with it. */
4428 if (exp_rtl)
4429 {
4430 if (GET_CODE (exp_rtl) == SUBREG)
4431 {
4432 exp_rtl = SUBREG_REG (exp_rtl);
4433 if (GET_CODE (exp_rtl) == REG
4434 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4435 return 0;
4436 }
4437
4438 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4439 are memory and EXP is not readonly. */
4440 return ! (rtx_equal_p (x, exp_rtl)
4441 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4442 && ! TREE_READONLY (exp)));
4443 }
4444
4445 /* If we reach here, it is safe. */
4446 return 1;
4447 }
4448
4449 /* Subroutine of expand_expr: return nonzero iff EXP is an
4450 expression whose type is statically determinable. */
4451
4452 static int
4453 fixed_type_p (exp)
4454 tree exp;
4455 {
4456 if (TREE_CODE (exp) == PARM_DECL
4457 || TREE_CODE (exp) == VAR_DECL
4458 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4459 || TREE_CODE (exp) == COMPONENT_REF
4460 || TREE_CODE (exp) == ARRAY_REF)
4461 return 1;
4462 return 0;
4463 }
4464 \f
4465 /* expand_expr: generate code for computing expression EXP.
4466 An rtx for the computed value is returned. The value is never null.
4467 In the case of a void EXP, const0_rtx is returned.
4468
4469 The value may be stored in TARGET if TARGET is nonzero.
4470 TARGET is just a suggestion; callers must assume that
4471 the rtx returned may not be the same as TARGET.
4472
4473 If TARGET is CONST0_RTX, it means that the value will be ignored.
4474
4475 If TMODE is not VOIDmode, it suggests generating the
4476 result in mode TMODE. But this is done only when convenient.
4477 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4478 TMODE is just a suggestion; callers must assume that
4479 the rtx returned may not have mode TMODE.
4480
4481 Note that TARGET may have neither TMODE nor MODE. In that case, it
4482 probably will not be used.
4483
4484 If MODIFIER is EXPAND_SUM then when EXP is an addition
4485 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4486 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4487 products as above, or REG or MEM, or constant.
4488 Ordinarily in such cases we would output mul or add instructions
4489 and then return a pseudo reg containing the sum.
4490
4491 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4492 it also marks a label as absolutely required (it can't be dead).
4493 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4494 This is used for outputting expressions used in initializers.
4495
4496 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4497 with a constant address even if that address is not normally legitimate.
4498 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
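/* Typical calls (illustrative; operands are hypothetical):

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);
       rtx sum = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);

   The first computes EXP into whatever register or memory is
   convenient; the second may return a (PLUS ...) suitable for use as
   a memory address instead of forcing the addition into a register. */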
4499
4500 rtx
4501 expand_expr (exp, target, tmode, modifier)
4502 register tree exp;
4503 rtx target;
4504 enum machine_mode tmode;
4505 enum expand_modifier modifier;
4506 {
4507 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4508 This is static so it will be accessible to our recursive callees. */
4509 static tree placeholder_list = 0;
4510 register rtx op0, op1, temp;
4511 tree type = TREE_TYPE (exp);
4512 int unsignedp = TREE_UNSIGNED (type);
4513 register enum machine_mode mode = TYPE_MODE (type);
4514 register enum tree_code code = TREE_CODE (exp);
4515 optab this_optab;
4516 /* Use subtarget as the target for operand 0 of a binary operation. */
4517 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4518 rtx original_target = target;
4519 /* Maybe defer this until sure not doing bytecode? */
4520 int ignore = (target == const0_rtx
4521 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4522 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4523 || code == COND_EXPR)
4524 && TREE_CODE (type) == VOID_TYPE));
4525 tree context;
4526
4527
4528 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4529 {
4530 bc_expand_expr (exp);
4531 return NULL;
4532 }
4533
4534 /* Don't use hard regs as subtargets, because the combiner
4535 can only handle pseudo regs. */
4536 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4537 subtarget = 0;
4538 /* Avoid subtargets inside loops,
4539 since they hide some invariant expressions. */
4540 if (preserve_subexpressions_p ())
4541 subtarget = 0;
4542
4543 /* If we are going to ignore this result, we need only do something
4544 if there is a side-effect somewhere in the expression. If there
4545 is, short-circuit the most common cases here. Note that we must
4546 not call expand_expr with anything but const0_rtx in case this
4547 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4548
4549 if (ignore)
4550 {
4551 if (! TREE_SIDE_EFFECTS (exp))
4552 return const0_rtx;
4553
4554 /* Ensure we reference a volatile object even if value is ignored. */
4555 if (TREE_THIS_VOLATILE (exp)
4556 && TREE_CODE (exp) != FUNCTION_DECL
4557 && mode != VOIDmode && mode != BLKmode)
4558 {
4559 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4560 if (GET_CODE (temp) == MEM)
4561 temp = copy_to_reg (temp);
4562 return const0_rtx;
4563 }
4564
4565 if (TREE_CODE_CLASS (code) == '1')
4566 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4567 VOIDmode, modifier);
4568 else if (TREE_CODE_CLASS (code) == '2'
4569 || TREE_CODE_CLASS (code) == '<')
4570 {
4571 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4572 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4573 return const0_rtx;
4574 }
4575 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4576 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4577 /* If the second operand has no side effects, just evaluate
4578 the first. */
4579 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4580 VOIDmode, modifier);
4581
4582 target = 0;
4583 }
4584
4585 /* If we will do cse, generate all results into pseudo registers
4586 since 1) that allows cse to find more things
4587 and 2) otherwise cse could produce an insn the machine
4588 cannot support. */
4589
4590 if (! cse_not_expected && mode != BLKmode && target
4591 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4592 target = subtarget;
4593
4594 switch (code)
4595 {
4596 case LABEL_DECL:
4597 {
4598 tree function = decl_function_context (exp);
4599 /* Handle using a label in a containing function. */
4600 if (function != current_function_decl && function != 0)
4601 {
4602 struct function *p = find_function_data (function);
4603 /* Allocate in the memory associated with the function
4604 that the label is in. */
4605 push_obstacks (p->function_obstack,
4606 p->function_maybepermanent_obstack);
4607
4608 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4609 label_rtx (exp), p->forced_labels);
4610 pop_obstacks ();
4611 }
4612 else if (modifier == EXPAND_INITIALIZER)
4613 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4614 label_rtx (exp), forced_labels);
4615 temp = gen_rtx (MEM, FUNCTION_MODE,
4616 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4617 if (function != current_function_decl && function != 0)
4618 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4619 return temp;
4620 }
4621
4622 case PARM_DECL:
4623 if (DECL_RTL (exp) == 0)
4624 {
4625 error_with_decl (exp, "prior parameter's size depends on `%s'");
4626 return CONST0_RTX (mode);
4627 }
4628
4629 /* ... fall through ... */
4630
4631 case VAR_DECL:
4632 /* If a static var's type was incomplete when the decl was written,
4633 but the type is complete now, lay out the decl now. */
4634 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4635 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4636 {
4637 push_obstacks_nochange ();
4638 end_temporary_allocation ();
4639 layout_decl (exp, 0);
4640 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4641 pop_obstacks ();
4642 }
4643
4644 /* ... fall through ... */
4645
4646 case FUNCTION_DECL:
4647 case RESULT_DECL:
4648 if (DECL_RTL (exp) == 0)
4649 abort ();
4650
4651 /* Ensure variable marked as used even if it doesn't go through
4652 a parser. If it hasn't been used yet, write out an external
4653 definition. */
4654 if (! TREE_USED (exp))
4655 {
4656 assemble_external (exp);
4657 TREE_USED (exp) = 1;
4658 }
4659
4660 /* Show we haven't gotten RTL for this yet. */
4661 temp = 0;
4662
4663 /* Handle variables inherited from containing functions. */
4664 context = decl_function_context (exp);
4665
4666 /* We treat inline_function_decl as an alias for the current function
4667 because that is the inline function whose vars, types, etc.
4668 are being merged into the current function.
4669 See expand_inline_function. */
4670
4671 if (context != 0 && context != current_function_decl
4672 && context != inline_function_decl
4673 /* If var is static, we don't need a static chain to access it. */
4674 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4675 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4676 {
4677 rtx addr;
4678
4679 /* Mark as non-local and addressable. */
4680 DECL_NONLOCAL (exp) = 1;
4681 mark_addressable (exp);
4682 if (GET_CODE (DECL_RTL (exp)) != MEM)
4683 abort ();
4684 addr = XEXP (DECL_RTL (exp), 0);
4685 if (GET_CODE (addr) == MEM)
4686 addr = gen_rtx (MEM, Pmode,
4687 fix_lexical_addr (XEXP (addr, 0), exp));
4688 else
4689 addr = fix_lexical_addr (addr, exp);
4690 temp = change_address (DECL_RTL (exp), mode, addr);
4691 }
4692
4693 /* This is the case of an array whose size is to be determined
4694 from its initializer, while the initializer is still being parsed.
4695 See expand_decl. */
4696
4697 else if (GET_CODE (DECL_RTL (exp)) == MEM
4698 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4699 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4700 XEXP (DECL_RTL (exp), 0));
4701
4702 /* If DECL_RTL is memory, we are in the normal case and either
4703 the address is not valid or it is not a register and -fforce-addr
4704 is specified, get the address into a register. */
4705
4706 else if (GET_CODE (DECL_RTL (exp)) == MEM
4707 && modifier != EXPAND_CONST_ADDRESS
4708 && modifier != EXPAND_SUM
4709 && modifier != EXPAND_INITIALIZER
4710 && (! memory_address_p (DECL_MODE (exp),
4711 XEXP (DECL_RTL (exp), 0))
4712 || (flag_force_addr
4713 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4714 temp = change_address (DECL_RTL (exp), VOIDmode,
4715 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4716
4717 /* If we got something, return it. But first, set the alignment
4718 if the address is a register. */
4719 if (temp != 0)
4720 {
4721 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4722 mark_reg_pointer (XEXP (temp, 0),
4723 DECL_ALIGN (exp) / BITS_PER_UNIT);
4724
4725 return temp;
4726 }
4727
4728 /* If the mode of DECL_RTL does not match that of the decl, it
4729 must be a promoted value. We return a SUBREG of the wanted mode,
4730 but mark it so that we know that it was already extended. */
4731
4732 if (GET_CODE (DECL_RTL (exp)) == REG
4733 && GET_MODE (DECL_RTL (exp)) != mode)
4734 {
4735 /* Get the signedness used for this variable. Ensure we get the
4736 same mode we got when the variable was declared. */
4737 if (GET_MODE (DECL_RTL (exp))
4738 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4739 abort ();
4740
4741 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4742 SUBREG_PROMOTED_VAR_P (temp) = 1;
4743 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4744 return temp;
4745 }
4746
4747 return DECL_RTL (exp);
4748
4749 case INTEGER_CST:
4750 return immed_double_const (TREE_INT_CST_LOW (exp),
4751 TREE_INT_CST_HIGH (exp),
4752 mode);
4753
4754 case CONST_DECL:
4755 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4756
4757 case REAL_CST:
4758 /* If optimized, generate immediate CONST_DOUBLE
4759 which will be turned into memory by reload if necessary.
4760
4761 We used to force a register so that loop.c could see it. But
4762 this does not allow gen_* patterns to perform optimizations with
4763 the constants. It also produces two insns in cases like "x = 1.0;".
4764 On most machines, floating-point constants are not permitted in
4765 many insns, so we'd end up copying it to a register in any case.
4766
4767 Now, we do the copying in expand_binop, if appropriate. */
4768 return immed_real_const (exp);
4769
4770 case COMPLEX_CST:
4771 case STRING_CST:
4772 if (! TREE_CST_RTL (exp))
4773 output_constant_def (exp);
4774
4775 /* TREE_CST_RTL probably contains a constant address.
4776 On RISC machines where a constant address isn't valid,
4777 make some insns to get that address into a register. */
4778 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4779 && modifier != EXPAND_CONST_ADDRESS
4780 && modifier != EXPAND_INITIALIZER
4781 && modifier != EXPAND_SUM
4782 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4783 || (flag_force_addr
4784 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4785 return change_address (TREE_CST_RTL (exp), VOIDmode,
4786 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4787 return TREE_CST_RTL (exp);
4788
4789 case SAVE_EXPR:
4790 context = decl_function_context (exp);
4791
4792 /* We treat inline_function_decl as an alias for the current function
4793 because that is the inline function whose vars, types, etc.
4794 are being merged into the current function.
4795 See expand_inline_function. */
4796 if (context == current_function_decl || context == inline_function_decl)
4797 context = 0;
4798
4799 /* If this is non-local, handle it. */
4800 if (context)
4801 {
4802 temp = SAVE_EXPR_RTL (exp);
4803 if (temp && GET_CODE (temp) == REG)
4804 {
4805 put_var_into_stack (exp);
4806 temp = SAVE_EXPR_RTL (exp);
4807 }
4808 if (temp == 0 || GET_CODE (temp) != MEM)
4809 abort ();
4810 return change_address (temp, mode,
4811 fix_lexical_addr (XEXP (temp, 0), exp));
4812 }
4813 if (SAVE_EXPR_RTL (exp) == 0)
4814 {
4815 if (mode == VOIDmode)
4816 temp = const0_rtx;
4817 else
4818 temp = assign_temp (type, 0, 0, 0);
4819
4820 SAVE_EXPR_RTL (exp) = temp;
4821 if (!optimize && GET_CODE (temp) == REG)
4822 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4823 save_expr_regs);
4824
4825 /* If the mode of TEMP does not match that of the expression, it
4826 must be a promoted value. We pass store_expr a SUBREG of the
4827 wanted mode but mark it so that we know that it was already
4828 extended. Note that `unsignedp' was modified above in
4829 this case. */
4830
4831 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4832 {
4833 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4834 SUBREG_PROMOTED_VAR_P (temp) = 1;
4835 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4836 }
4837
4838 if (temp == const0_rtx)
4839 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4840 else
4841 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4842 }
4843
4844 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4845 must be a promoted value. We return a SUBREG of the wanted mode,
4846 but mark it so that we know that it was already extended. */
4847
4848 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4849 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4850 {
4851 /* Compute the signedness and make the proper SUBREG. */
4852 promote_mode (type, mode, &unsignedp, 0);
4853 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4854 SUBREG_PROMOTED_VAR_P (temp) = 1;
4855 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4856 return temp;
4857 }
4858
4859 return SAVE_EXPR_RTL (exp);
4860
4861 case PLACEHOLDER_EXPR:
4862 /* If there is an object on the head of the placeholder list,
4863 see if some object in its references is of type TYPE. For
4864 further information, see tree.def. */
4865 if (placeholder_list)
4866 {
4867 tree object;
4868 tree old_list = placeholder_list;
4869
4870 for (object = TREE_PURPOSE (placeholder_list);
4871 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4872 != TYPE_MAIN_VARIANT (type))
4873 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4874 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4875 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4876 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4877 object = TREE_OPERAND (object, 0))
4878 ;
4879
4880 if (object != 0
4881 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4882 == TYPE_MAIN_VARIANT (type)))
4883 {
4884 /* Expand this object skipping the list entries before
4885 it was found in case it is also a PLACEHOLDER_EXPR.
4886 In that case, we want to translate it using subsequent
4887 entries. */
4888 placeholder_list = TREE_CHAIN (placeholder_list);
4889 temp = expand_expr (object, original_target, tmode, modifier);
4890 placeholder_list = old_list;
4891 return temp;
4892 }
4893 }
4894
4895 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4896 abort ();
4897
4898 case WITH_RECORD_EXPR:
4899 /* Put the object on the placeholder list, expand our first operand,
4900 and pop the list. */
4901 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4902 placeholder_list);
4903 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4904 tmode, modifier);
4905 placeholder_list = TREE_CHAIN (placeholder_list);
4906 return target;
4907
4908 case EXIT_EXPR:
4909 expand_exit_loop_if_false (NULL_PTR,
4910 invert_truthvalue (TREE_OPERAND (exp, 0)));
4911 return const0_rtx;
4912
4913 case LOOP_EXPR:
4914 push_temp_slots ();
4915 expand_start_loop (1);
4916 expand_expr_stmt (TREE_OPERAND (exp, 0));
4917 expand_end_loop ();
4918 pop_temp_slots ();
4919
4920 return const0_rtx;
4921
4922 case BIND_EXPR:
4923 {
4924 tree vars = TREE_OPERAND (exp, 0);
4925 int vars_need_expansion = 0;
4926
4927 /* Need to open a binding contour here because
4928 if there are any cleanups they must be contained here. */
4929 expand_start_bindings (0);
4930
4931 /* Mark the corresponding BLOCK for output in its proper place. */
4932 if (TREE_OPERAND (exp, 2) != 0
4933 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4934 insert_block (TREE_OPERAND (exp, 2));
4935
4936 /* If VARS have not yet been expanded, expand them now. */
4937 while (vars)
4938 {
4939 if (DECL_RTL (vars) == 0)
4940 {
4941 vars_need_expansion = 1;
4942 expand_decl (vars);
4943 }
4944 expand_decl_init (vars);
4945 vars = TREE_CHAIN (vars);
4946 }
4947
4948 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4949
4950 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4951
4952 return temp;
4953 }
4954
4955 case RTL_EXPR:
4956 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4957 abort ();
4958 emit_insns (RTL_EXPR_SEQUENCE (exp));
4959 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4960 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4961 free_temps_for_rtl_expr (exp);
4962 return RTL_EXPR_RTL (exp);
4963
4964 case CONSTRUCTOR:
4965 /* If we don't need the result, just ensure we evaluate any
4966 subexpressions. */
4967 if (ignore)
4968 {
4969 tree elt;
4970 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4971 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4972 return const0_rtx;
4973 }
4974
4975 /* All elts simple constants => refer to a constant in memory. But
4976 if this is a non-BLKmode mode, let it store a field at a time
4977 since that should make a CONST_INT or CONST_DOUBLE when we
4978 fold. Likewise, if we have a target we can use, it is best to
4979 store directly into the target unless the type is large enough
4980 that memcpy will be used. If we are making an initializer and
4981 all operands are constant, put it in memory as well. */
4982 else if ((TREE_STATIC (exp)
4983 && ((mode == BLKmode
4984 && ! (target != 0 && safe_from_p (target, exp)))
4985 || TREE_ADDRESSABLE (exp)
4986 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4987 && (move_by_pieces_ninsns
4988 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4989 TYPE_ALIGN (type) / BITS_PER_UNIT)
4990 > MOVE_RATIO)
4991 && ! mostly_zeros_p (exp))))
4992 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4993 {
4994 rtx constructor = output_constant_def (exp);
4995 if (modifier != EXPAND_CONST_ADDRESS
4996 && modifier != EXPAND_INITIALIZER
4997 && modifier != EXPAND_SUM
4998 && (! memory_address_p (GET_MODE (constructor),
4999 XEXP (constructor, 0))
5000 || (flag_force_addr
5001 && GET_CODE (XEXP (constructor, 0)) != REG)))
5002 constructor = change_address (constructor, VOIDmode,
5003 XEXP (constructor, 0));
5004 return constructor;
5005 }
5006
5007 else
5008 {
5009 if (target == 0 || ! safe_from_p (target, exp))
5010 {
5011 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5012 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5013 else
5014 target = assign_temp (type, 0, 1, 1);
5015 }
5016
5017 if (TREE_READONLY (exp))
5018 {
5019 if (GET_CODE (target) == MEM)
5020 target = change_address (target, GET_MODE (target),
5021 XEXP (target, 0));
5022 RTX_UNCHANGING_P (target) = 1;
5023 }
5024
5025 store_constructor (exp, target, 0);
5026 return target;
5027 }
5028
5029 case INDIRECT_REF:
5030 {
5031 tree exp1 = TREE_OPERAND (exp, 0);
5032 tree exp2;
5033
5034 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
5035 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
5036 This code has the same general effect as simply doing
5037 expand_expr on the save expr, except that the expression PTR
5038 is computed for use as a memory address. This means different
5039 code, suitable for indexing, may be generated. */
5040 if (TREE_CODE (exp1) == SAVE_EXPR
5041 && SAVE_EXPR_RTL (exp1) == 0
5042 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
5043 {
5044 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
5045 VOIDmode, EXPAND_SUM);
5046 op0 = memory_address (mode, temp);
5047 op0 = copy_all_regs (op0);
5048 SAVE_EXPR_RTL (exp1) = op0;
5049 }
5050 else
5051 {
5052 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5053 op0 = memory_address (mode, op0);
5054 }
5055
5056 temp = gen_rtx (MEM, mode, op0);
5057 /* If address was computed by addition,
5058 mark this as an element of an aggregate. */
5059 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5060 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5061 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5062 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5063 || (TREE_CODE (exp1) == ADDR_EXPR
5064 && (exp2 = TREE_OPERAND (exp1, 0))
5065 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5066 MEM_IN_STRUCT_P (temp) = 1;
5067 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5068
5069 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5070 here, because, in C and C++, the fact that a location is accessed
5071 through a pointer to const does not mean that the value there can
5072 never change. Languages where it can never change should
5073 also set TREE_STATIC. */
5074 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5075 return temp;
5076 }
5077
5078 case ARRAY_REF:
5079 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5080 abort ();
5081
5082 {
5083 tree array = TREE_OPERAND (exp, 0);
5084 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5085 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5086 tree index = TREE_OPERAND (exp, 1);
5087 tree index_type = TREE_TYPE (index);
5088 int i;
5089
5090 if (TREE_CODE (low_bound) != INTEGER_CST
5091 && contains_placeholder_p (low_bound))
5092 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5093
5094 /* Optimize the special-case of a zero lower bound.
5095
5096 We convert the low_bound to sizetype to avoid some problems
5097 with constant folding. (E.g. suppose the lower bound is 1,
5098 and its mode is QI. Without the conversion, (ARRAY
5099 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5100 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5101
5102 But sizetype isn't quite right either (especially if
5103 the lowbound is negative). FIXME */
5104
5105 if (! integer_zerop (low_bound))
5106 index = fold (build (MINUS_EXPR, index_type, index,
5107 convert (sizetype, low_bound)));
5108
5109 if ((TREE_CODE (index) != INTEGER_CST
5110 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5111 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5112 {
5113 /* Nonconstant array index or nonconstant element size, and
5114 not an array in an unaligned (packed) structure field.
5115 Generate the tree for *(&array+index) and expand that,
5116 except do it in a language-independent way
5117 and don't complain about non-lvalue arrays.
5118 `mark_addressable' should already have been called
5119 for any array for which this case will be reached. */
5120
5121 /* Don't forget the const or volatile flag from the array
5122 element. */
5123 tree variant_type = build_type_variant (type,
5124 TREE_READONLY (exp),
5125 TREE_THIS_VOLATILE (exp));
5126 tree array_adr = build1 (ADDR_EXPR,
5127 build_pointer_type (variant_type), array);
5128 tree elt;
5129 tree size = size_in_bytes (type);
5130
5131 /* Convert the integer argument to a type the same size as sizetype
5132 so the multiply won't overflow spuriously. */
5133 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5134 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5135 index);
5136
5137 if (TREE_CODE (size) != INTEGER_CST
5138 && contains_placeholder_p (size))
5139 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5140
5141 /* Don't think the address has side effects
5142 just because the array does.
5143 (In some cases the address might have side effects,
5144 and we fail to record that fact here. However, it should not
5145 matter, since expand_expr should not care.) */
5146 TREE_SIDE_EFFECTS (array_adr) = 0;
5147
5148 elt
5149 = build1
5150 (INDIRECT_REF, type,
5151 fold (build (PLUS_EXPR,
5152 TYPE_POINTER_TO (variant_type),
5153 array_adr,
5154 fold
5155 (build1
5156 (NOP_EXPR,
5157 TYPE_POINTER_TO (variant_type),
5158 fold (build (MULT_EXPR, TREE_TYPE (index),
5159 index,
5160 convert (TREE_TYPE (index),
5161 size))))))));
5162
5163 /* Volatility, etc., of new expression is same as old
5164 expression. */
5165 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5166 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5167 TREE_READONLY (elt) = TREE_READONLY (exp);
5168
5169 return expand_expr (elt, target, tmode, modifier);
5170 }
5171
5172 /* Fold an expression like: "foo"[2].
5173 This is not done in fold so it won't happen inside &.
5174 Don't fold if this is for wide characters since it's too
5175 difficult to do correctly and this is a very rare case. */
5176
5177 if (TREE_CODE (array) == STRING_CST
5178 && TREE_CODE (index) == INTEGER_CST
5179 && !TREE_INT_CST_HIGH (index)
5180 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5181 && GET_MODE_CLASS (mode) == MODE_INT
5182 && GET_MODE_SIZE (mode) == 1)
5183 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5184
5185 /* If this is a constant index into a constant array,
5186 just get the value from the array. Handle both the cases when
5187 we have an explicit constructor and when our operand is a variable
5188 that was declared const. */
5189
5190 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5191 {
5192 if (TREE_CODE (index) == INTEGER_CST
5193 && TREE_INT_CST_HIGH (index) == 0)
5194 {
5195 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5196
5197 i = TREE_INT_CST_LOW (index);
5198 while (elem && i--)
5199 elem = TREE_CHAIN (elem);
5200 if (elem)
5201 return expand_expr (fold (TREE_VALUE (elem)), target,
5202 tmode, modifier);
5203 }
5204 }
5205
5206 else if (optimize >= 1
5207 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5208 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5209 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5210 {
5211 if (TREE_CODE (index) == INTEGER_CST
5212 && TREE_INT_CST_HIGH (index) == 0)
5213 {
5214 tree init = DECL_INITIAL (array);
5215
5216 i = TREE_INT_CST_LOW (index);
5217 if (TREE_CODE (init) == CONSTRUCTOR)
5218 {
5219 tree elem = CONSTRUCTOR_ELTS (init);
5220
5221 while (elem
5222 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5223 elem = TREE_CHAIN (elem);
5224 if (elem)
5225 return expand_expr (fold (TREE_VALUE (elem)), target,
5226 tmode, modifier);
5227 }
5228 else if (TREE_CODE (init) == STRING_CST
5229 && i < TREE_STRING_LENGTH (init))
5230 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5231 }
5232 }
5233 }
5234
5235 /* Treat array-ref with constant index as a component-ref. */
5236
5237 case COMPONENT_REF:
5238 case BIT_FIELD_REF:
5239 /* If the operand is a CONSTRUCTOR, we can just extract the
5240 appropriate field if it is present. Don't do this if we have
5241 already written the data since we want to refer to that copy
5242 and varasm.c assumes that's what we'll do. */
5243 if (code != ARRAY_REF
5244 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5245 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5246 {
5247 tree elt;
5248
5249 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5250 elt = TREE_CHAIN (elt))
5251 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5252 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5253 }
5254
5255 {
5256 enum machine_mode mode1;
5257 int bitsize;
5258 int bitpos;
5259 tree offset;
5260 int volatilep = 0;
5261 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5262 &mode1, &unsignedp, &volatilep);
5263 int alignment;
5264
5265 /* If we got back the original object, something is wrong. Perhaps
5266 we are evaluating an expression too early. In any event, don't
5267 infinitely recurse. */
5268 if (tem == exp)
5269 abort ();
5270
5271 /* If TEM's type is a union of variable size, pass TARGET to the inner
5272 computation, since it will need a temporary and TARGET is known
5273 to suffice. This occurs in unchecked conversion in Ada. */
5274
5275 op0 = expand_expr (tem,
5276 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5277 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5278 != INTEGER_CST)
5279 ? target : NULL_RTX),
5280 VOIDmode,
5281 modifier == EXPAND_INITIALIZER ? modifier : 0);
5282
5283 /* If this is a constant, put it into a register if it is a
5284 legitimate constant and memory if it isn't. */
5285 if (CONSTANT_P (op0))
5286 {
5287 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5288 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5289 op0 = force_reg (mode, op0);
5290 else
5291 op0 = validize_mem (force_const_mem (mode, op0));
5292 }
5293
5294 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5295 if (offset != 0)
5296 {
5297 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5298
5299 if (GET_CODE (op0) != MEM)
5300 abort ();
5301 op0 = change_address (op0, VOIDmode,
5302 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5303 force_reg (ptr_mode, offset_rtx)));
5304 /* If we have a variable offset, the known alignment
5305 is only that of the innermost structure containing the field.
5306 (Actually, we could sometimes do better by using the
5307 size of an element of the innermost array, but no need.) */
5308 if (TREE_CODE (exp) == COMPONENT_REF
5309 || TREE_CODE (exp) == BIT_FIELD_REF)
5310 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5311 / BITS_PER_UNIT);
5312 }
5313
5314 /* Don't forget about volatility even if this is a bitfield. */
5315 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5316 {
5317 op0 = copy_rtx (op0);
5318 MEM_VOLATILE_P (op0) = 1;
5319 }
5320
5321 /* In cases where an aligned union has an unaligned object
5322 as a field, we might be extracting a BLKmode value from
5323 an integer-mode (e.g., SImode) object. Handle this case
5324 by doing the extract into an object as wide as the field
5325 (which we know to be the width of a basic mode), then
5326 storing into memory, and changing the mode to BLKmode. */
5327 if (mode1 == VOIDmode
5328 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5329 || (modifier != EXPAND_CONST_ADDRESS
5330 && modifier != EXPAND_SUM
5331 && modifier != EXPAND_INITIALIZER
5332 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5333 /* If the field isn't aligned enough to fetch as a memref,
5334 fetch it as a bit field. */
5335 || (SLOW_UNALIGNED_ACCESS
5336 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5337 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5338 {
5339 enum machine_mode ext_mode = mode;
5340
5341 if (ext_mode == BLKmode)
5342 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5343
5344 if (ext_mode == BLKmode)
5345 abort ();
5346
5347 op0 = validize_mem (op0);
5348
5349 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5350 mark_reg_pointer (XEXP (op0, 0), alignment);
5351
5352 op0 = extract_bit_field (op0, bitsize, bitpos,
5353 unsignedp, target, ext_mode, ext_mode,
5354 alignment,
5355 int_size_in_bytes (TREE_TYPE (tem)));
5356 if (mode == BLKmode)
5357 {
5358 rtx new = assign_stack_temp (ext_mode,
5359 bitsize / BITS_PER_UNIT, 0);
5360
5361 emit_move_insn (new, op0);
5362 op0 = copy_rtx (new);
5363 PUT_MODE (op0, BLKmode);
5364 MEM_IN_STRUCT_P (op0) = 1;
5365 }
5366
5367 return op0;
5368 }
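/* Editorial sketch, not compiled (hence the #if 0): the aligned-union
   case handled above.  A BLKmode field is fetched from an integer-mode
   object by extracting the bits into a wide-enough integer, spilling
   that to a stack temporary, and using the temporary, viewed in
   BLKmode, as the result.  The types here are hypothetical.  */
#if 0
union u
{
  int whole;			/* makes the union SImode */
  struct { char c[3]; } part;	/* a BLKmode field inside it */
};
/* Reading U.PART when U lives in an SImode register extracts the field
   with extract_bit_field, stores it into a stack slot, and returns the
   slot with its mode changed to BLKmode.  */
#endif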
5369
5370 /* If the result is BLKmode, use that to access the object
5371 now as well. */
5372 if (mode == BLKmode)
5373 mode1 = BLKmode;
5374
5375 /* Get a reference to just this component. */
5376 if (modifier == EXPAND_CONST_ADDRESS
5377 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5378 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5379 (bitpos / BITS_PER_UNIT)));
5380 else
5381 op0 = change_address (op0, mode1,
5382 plus_constant (XEXP (op0, 0),
5383 (bitpos / BITS_PER_UNIT)));
5384 if (GET_CODE (XEXP (op0, 0)) == REG)
5385 mark_reg_pointer (XEXP (op0, 0), alignment);
5386
5387 MEM_IN_STRUCT_P (op0) = 1;
5388 MEM_VOLATILE_P (op0) |= volatilep;
5389 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5390 return op0;
5391 if (target == 0)
5392 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5393 convert_move (target, op0, unsignedp);
5394 return target;
5395 }
5396
5397 case OFFSET_REF:
5398 {
5399 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
5400 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
5401 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
5402 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
5403 MEM_IN_STRUCT_P (temp) = 1;
5404 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
5405 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
5406 a location is accessed through a pointer to const does not mean
5407 that the value there can never change. */
5408 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
5409 #endif
5410 return temp;
5411 }
5412
5413 /* Intended for a reference to a buffer of a file-object in Pascal.
5414 But it's not certain that a special tree code will really be
5415 necessary for these. INDIRECT_REF might work for them. */
5416 case BUFFER_REF:
5417 abort ();
5418
5419 case IN_EXPR:
5420 {
5421 /* Pascal set IN expression. The set is addressed per unit (byte),
5422 matching the BITS_PER_UNIT arithmetic below. Algorithm:
5423
5424 rlo = set_low - (set_low % bits_per_unit);
5425 the_byte = set [ (index - rlo) / bits_per_unit ];
5426 bit_index = index % bits_per_unit;
5427 bitmask = 1 << bit_index;
5428 return !!(the_byte & bitmask); */
5429
5430 tree set = TREE_OPERAND (exp, 0);
5431 tree index = TREE_OPERAND (exp, 1);
5432 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5433 tree set_type = TREE_TYPE (set);
5434 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5435 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5436 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5437 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5438 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5439 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5440 rtx setaddr = XEXP (setval, 0);
5441 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5442 rtx rlow;
5443 rtx diff, quo, rem, addr, bit, result;
5444
5445 preexpand_calls (exp);
5446
5447 /* If domain is empty, answer is no. Likewise if index is constant
5448 and out of bounds. */
5449 if (((TREE_CODE (set_high_bound) == INTEGER_CST
5450 && TREE_CODE (set_low_bound) == INTEGER_CST
5451 && tree_int_cst_lt (set_high_bound, set_low_bound))
5452 || (TREE_CODE (index) == INTEGER_CST
5453 && TREE_CODE (set_low_bound) == INTEGER_CST
5454 && tree_int_cst_lt (index, set_low_bound))
5455 || (TREE_CODE (set_high_bound) == INTEGER_CST
5456 && TREE_CODE (index) == INTEGER_CST
5457 && tree_int_cst_lt (set_high_bound, index))))
5458 return const0_rtx;
5459
5460 if (target == 0)
5461 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5462
5463 /* If we get here, we have to generate the code for both cases
5464 (in range and out of range). */
5465
5466 op0 = gen_label_rtx ();
5467 op1 = gen_label_rtx ();
5468
5469 if (! (GET_CODE (index_val) == CONST_INT
5470 && GET_CODE (lo_r) == CONST_INT))
5471 {
5472 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5473 GET_MODE (index_val), iunsignedp, 0);
5474 emit_jump_insn (gen_blt (op1));
5475 }
5476
5477 if (! (GET_CODE (index_val) == CONST_INT
5478 && GET_CODE (hi_r) == CONST_INT))
5479 {
5480 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5481 GET_MODE (index_val), iunsignedp, 0);
5482 emit_jump_insn (gen_bgt (op1));
5483 }
5484
5485 /* Calculate the element number of bit zero in the first unit of
5486 the set: SET_LOW rounded down to a multiple of BITS_PER_UNIT. */
5487 if (GET_CODE (lo_r) == CONST_INT)
5488 rlow = GEN_INT (INTVAL (lo_r)
5489 & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
5490 else
5491 rlow = expand_binop (index_mode, and_optab, lo_r,
5492 GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
5493 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5494
5495 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5496 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5497
5498 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5499 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5500 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5501 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5502
5503 addr = memory_address (byte_mode,
5504 expand_binop (index_mode, add_optab, quo,
5505 setaddr, NULL_RTX, iunsignedp,
5506 OPTAB_LIB_WIDEN));
5507
5508 /* Extract the bit we want to examine. */
5509 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5510 gen_rtx (MEM, byte_mode, addr),
5511 make_tree (TREE_TYPE (index), rem),
5512 NULL_RTX, 1);
5513 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5514 GET_MODE (target) == byte_mode ? target : 0,
5515 1, OPTAB_LIB_WIDEN);
5516
5517 if (result != target)
5518 convert_move (target, result, 1);
5519
5520 /* Output the code to handle the out-of-range case. */
5521 emit_jump (op0);
5522 emit_label (op1);
5523 emit_move_insn (target, const0_rtx);
5524 emit_label (op0);
5525 return target;
5526 }
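/* Editorial sketch, not compiled (hence the #if 0): the bit test the
   IN_EXPR expansion above implements, as ordinary C, assuming 8-bit
   units.  The function name and parameters are hypothetical.  */
#if 0
static int
set_contains (set, set_low, index)
     unsigned char *set;
     int set_low, index;
{
  int rlo = set_low - (set_low % 8);		/* round down to a unit */
  unsigned char the_byte = set[(index - rlo) / 8];	/* quo */
  int bit_index = index % 8;				/* rem */
  return (the_byte >> bit_index) & 1;
}
#endif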
5527
5528 case WITH_CLEANUP_EXPR:
5529 if (RTL_EXPR_RTL (exp) == 0)
5530 {
5531 RTL_EXPR_RTL (exp)
5532 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5533 cleanups_this_call
5534 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5535 /* That's it for this cleanup. */
5536 TREE_OPERAND (exp, 2) = 0;
5537 (*interim_eh_hook) (NULL_TREE);
5538 }
5539 return RTL_EXPR_RTL (exp);
5540
5541 case CLEANUP_POINT_EXPR:
5542 {
5543 extern int temp_slot_level;
5544 tree old_cleanups = cleanups_this_call;
5545 int old_temp_level = target_temp_slot_level;
5546 push_temp_slots ();
5547 target_temp_slot_level = temp_slot_level;
5548 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5549 /* If we're going to use this value, load it up now. */
5550 if (! ignore)
5551 op0 = force_not_mem (op0);
5552 expand_cleanups_to (old_cleanups);
5553 preserve_temp_slots (op0);
5554 free_temp_slots ();
5555 pop_temp_slots ();
5556 target_temp_slot_level = old_temp_level;
5557 }
5558 return op0;
5559
5560 case CALL_EXPR:
5561 /* Check for a built-in function. */
5562 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5563 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5564 == FUNCTION_DECL)
5565 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5566 return expand_builtin (exp, target, subtarget, tmode, ignore);
5567
5568 /* If this call was expanded already by preexpand_calls,
5569 just return the result we got. */
5570 if (CALL_EXPR_RTL (exp) != 0)
5571 return CALL_EXPR_RTL (exp);
5572
5573 return expand_call (exp, target, ignore);
5574
5575 case NON_LVALUE_EXPR:
5576 case NOP_EXPR:
5577 case CONVERT_EXPR:
5578 case REFERENCE_EXPR:
5579 if (TREE_CODE (type) == UNION_TYPE)
5580 {
5581 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5582 if (target == 0)
5583 {
5584 if (mode != BLKmode)
5585 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5586 else
5587 target = assign_temp (type, 0, 1, 1);
5588 }
5589
5590 if (GET_CODE (target) == MEM)
5591 /* Store data into beginning of memory target. */
5592 store_expr (TREE_OPERAND (exp, 0),
5593 change_address (target, TYPE_MODE (valtype), 0), 0);
5594
5595 else if (GET_CODE (target) == REG)
5596 /* Store this field into a union of the proper type. */
5597 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5598 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5599 VOIDmode, 0, 1,
5600 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5601 else
5602 abort ();
5603
5604 /* Return the entire union. */
5605 return target;
5606 }
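/* Editorial illustration, not compiled (hence the #if 0): the GNU
   "cast to union" construct that the UNION_TYPE conversion above
   expands; the operand is stored into the matching field of a fresh
   union value.  The type name is hypothetical.  */
#if 0
union number { int i; double d; };
union number n = (union number) 3;	/* stores 3 into n.i and
					   yields the whole union */
#endif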
5607
5608 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5609 {
5610 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5611 modifier);
5612
5613 /* If the signedness of the conversion differs and OP0 is
5614 a promoted SUBREG, clear that indication since we now
5615 have to do the proper extension. */
5616 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5617 && GET_CODE (op0) == SUBREG)
5618 SUBREG_PROMOTED_VAR_P (op0) = 0;
5619
5620 return op0;
5621 }
5622
5623 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5624 if (GET_MODE (op0) == mode)
5625 return op0;
5626
5627 /* If OP0 is a constant, just convert it into the proper mode. */
5628 if (CONSTANT_P (op0))
5629 return
5630 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5631 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5632
5633 if (modifier == EXPAND_INITIALIZER)
5634 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5635
5636 if (target == 0)
5637 return
5638 convert_to_mode (mode, op0,
5639 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5640 else
5641 convert_move (target, op0,
5642 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5643 return target;
5644
5645 case PLUS_EXPR:
5646 /* We come here from MINUS_EXPR when the second operand is a constant. */
5647 plus_expr:
5648 this_optab = add_optab;
5649
5650 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5651 something else, make sure we add the register to the constant and
5652 then to the other thing. This case can occur during strength
5653 reduction and doing it this way will produce better code if the
5654 frame pointer or argument pointer is eliminated.
5655
5656 fold-const.c will ensure that the constant is always in the inner
5657 PLUS_EXPR, so the only case we need to do anything about is if
5658 sp, ap, or fp is our second argument, in which case we must swap
5659 the innermost first argument and our second argument. */
5660
5661 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5662 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5663 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5664 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5665 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5666 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5667 {
5668 tree t = TREE_OPERAND (exp, 1);
5669
5670 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5671 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5672 }
5673
5674 /* If the result is to be ptr_mode and we are adding an integer to
5675 something, we might be forming a constant. So try to use
5676 plus_constant. If it produces a sum and we can't accept it,
5677 use force_operand. This allows P = &ARR[const] to generate
5678 efficient code on machines where a SYMBOL_REF is not a valid
5679 address.
5680
5681 If this is an EXPAND_SUM call, always return the sum. */
5682 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5683 || mode == ptr_mode)
5684 {
5685 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5686 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5687 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5688 {
5689 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5690 EXPAND_SUM);
5691 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5692 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5693 op1 = force_operand (op1, target);
5694 return op1;
5695 }
5696
5697 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5698 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5699 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5700 {
5701 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5702 EXPAND_SUM);
5703 if (! CONSTANT_P (op0))
5704 {
5705 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5706 VOIDmode, modifier);
5707 /* Don't go to both_summands if modifier
5708 says it's not right to return a PLUS. */
5709 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5710 goto binop2;
5711 goto both_summands;
5712 }
5713 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5714 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5715 op0 = force_operand (op0, target);
5716 return op0;
5717 }
5718 }
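/* Editorial illustration, not compiled (hence the #if 0): the constant
   folding the ptr_mode path above performs.  The array name is
   hypothetical.  */
#if 0
extern int arr[10];
int *p = &arr[3];	/* folded by plus_constant into
			   (const (plus (symbol_ref "arr") (const_int 12)))
			   instead of a runtime addition */
#endif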
5719
5720 /* No sense saving up arithmetic to be done
5721 if it's all in the wrong mode to form part of an address.
5722 And force_operand won't know whether to sign-extend or
5723 zero-extend. */
5724 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5725 || mode != ptr_mode)
5726 goto binop;
5727
5728 preexpand_calls (exp);
5729 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5730 subtarget = 0;
5731
5732 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5733 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5734
5735 both_summands:
5736 /* Make sure any term that's a sum with a constant comes last. */
5737 if (GET_CODE (op0) == PLUS
5738 && CONSTANT_P (XEXP (op0, 1)))
5739 {
5740 temp = op0;
5741 op0 = op1;
5742 op1 = temp;
5743 }
5744 /* If adding to a sum including a constant,
5745 associate it to put the constant outside. */
5746 if (GET_CODE (op1) == PLUS
5747 && CONSTANT_P (XEXP (op1, 1)))
5748 {
5749 rtx constant_term = const0_rtx;
5750
5751 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5752 if (temp != 0)
5753 op0 = temp;
5754 /* Ensure that MULT comes first if there is one. */
5755 else if (GET_CODE (op0) == MULT)
5756 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5757 else
5758 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5759
5760 /* Let's also eliminate constants from op0 if possible. */
5761 op0 = eliminate_constant_term (op0, &constant_term);
5762
5763 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5764 their sum should be a constant. Form it into OP1, since the
5765 result we want will then be OP0 + OP1. */
5766
5767 temp = simplify_binary_operation (PLUS, mode, constant_term,
5768 XEXP (op1, 1));
5769 if (temp != 0)
5770 op1 = temp;
5771 else
5772 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5773 }
5774
5775 /* Put a constant term last and put a multiplication first. */
5776 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5777 temp = op1, op1 = op0, op0 = temp;
5778
5779 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5780 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
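/* Editorial illustration, not compiled (hence the #if 0): the
   reassociation performed at both_summands, at the source level.
   The names are hypothetical.  */
#if 0
int example (x, y)
     int x, y;
{
  /* The inner constants are pulled outward, so the sum is formed as
     (plus (plus x y) (const_int 7)), letting the constant fold into
     an addressing mode or a single immediate.  */
  return (x + 3) + (y + 4);
}
#endif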
5781
5782 case MINUS_EXPR:
5783 /* For the sake of initializers, we are allowed to return a MINUS
5784 of two symbolic constants. Here we handle all cases when both
5785 operands are constant. */
5788 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5789 && really_constant_p (TREE_OPERAND (exp, 0))
5790 && really_constant_p (TREE_OPERAND (exp, 1)))
5791 {
5792 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5793 VOIDmode, modifier);
5794 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5795 VOIDmode, modifier);
5796
5797 /* If the last operand is a CONST_INT, use plus_constant of
5798 the negated constant. Else make the MINUS. */
5799 if (GET_CODE (op1) == CONST_INT)
5800 return plus_constant (op0, - INTVAL (op1));
5801 else
5802 return gen_rtx (MINUS, mode, op0, op1);
5803 }
5804 /* Convert A - const to A + (-const). */
5805 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5806 {
5807 tree negated = fold (build1 (NEGATE_EXPR, type,
5808 TREE_OPERAND (exp, 1)));
5809
5810 /* Deal with the case where we can't negate the constant
5811 in TYPE. */
5812 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5813 {
5814 tree newtype = signed_type (type);
5815 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5816 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5817 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5818
5819 if (! TREE_OVERFLOW (newneg))
5820 return expand_expr (convert (type,
5821 build (PLUS_EXPR, newtype,
5822 newop0, newneg)),
5823 target, tmode, modifier);
5824 }
5825 else
5826 {
5827 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5828 goto plus_expr;
5829 }
5830 }
5831 this_optab = sub_optab;
5832 goto binop;
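/* Editorial illustration, not compiled (hence the #if 0): the
   A - const rewrite above, including the unsigned case that must be
   negated in the corresponding signed type.  Names are hypothetical.  */
#if 0
int f (a)
     int a;
{
  return a - 5;		/* becomes a + (-5), expanded as PLUS_EXPR */
}
unsigned int g (a)
     unsigned int a;
{
  return a - 5;		/* -5 overflows the unsigned type, so the sum
			   is built in the signed type and converted
			   back to unsigned */
}
#endif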
5833
5834 case MULT_EXPR:
5835 preexpand_calls (exp);
5836 /* If first operand is constant, swap them.
5837 Thus the following special case checks need only
5838 check the second operand. */
5839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5840 {
5841 register tree t1 = TREE_OPERAND (exp, 0);
5842 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5843 TREE_OPERAND (exp, 1) = t1;
5844 }
5845
5846 /* Attempt to return something suitable for generating an
5847 indexed address, for machines that support that. */
5848
5849 if (modifier == EXPAND_SUM && mode == ptr_mode
5850 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5851 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5852 {
5853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5854
5855 /* Apply distributive law if OP0 is x+c. */
5856 if (GET_CODE (op0) == PLUS
5857 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5858 return gen_rtx (PLUS, mode,
5859 gen_rtx (MULT, mode, XEXP (op0, 0),
5860 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5861 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5862 * INTVAL (XEXP (op0, 1))));
5863
5864 if (GET_CODE (op0) != REG)
5865 op0 = force_operand (op0, NULL_RTX);
5866 if (GET_CODE (op0) != REG)
5867 op0 = copy_to_mode_reg (mode, op0);
5868
5869 return gen_rtx (MULT, mode, op0,
5870 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5871 }
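/* Editorial illustration, not compiled (hence the #if 0): the
   distributive rewrite above, which fires when the multiply occurs
   inside an address computation (EXPAND_SUM).  The buffer name is
   hypothetical.  */
#if 0
extern char buf[];
char pick (x)
     int x;
{
  /* The index is returned as (plus (mult x 3) (const_int 12)), so
     indexed addressing can absorb the constant part.  */
  return buf[(x + 4) * 3];
}
#endif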
5872
5873 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5874 subtarget = 0;
5875
5876 /* Check for multiplying things that have been extended
5877 from a narrower type. If this machine supports multiplying
5878 in that narrower type with a result in the desired type,
5879 do it that way, and avoid the explicit type-conversion. */
5880 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5881 && TREE_CODE (type) == INTEGER_TYPE
5882 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5883 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5884 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5885 && int_fits_type_p (TREE_OPERAND (exp, 1),
5886 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5887 /* Don't use a widening multiply if a shift will do. */
5888 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5889 > HOST_BITS_PER_WIDE_INT)
5890 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5891 ||
5892 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5893 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5894 ==
5895 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5896 /* If both operands are extended, they must either both
5897 be zero-extended or both be sign-extended. */
5898 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5899 ==
5900 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5901 {
5902 enum machine_mode innermode
5903 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5904 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5905 ? smul_widen_optab : umul_widen_optab);
5906 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5907 ? umul_widen_optab : smul_widen_optab);
5908 if (mode == GET_MODE_WIDER_MODE (innermode))
5909 {
5910 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5911 {
5912 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5913 NULL_RTX, VOIDmode, 0);
5914 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5915 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5916 VOIDmode, 0);
5917 else
5918 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5919 NULL_RTX, VOIDmode, 0);
5920 goto binop2;
5921 }
5922 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5923 && innermode == word_mode)
5924 {
5925 rtx htem;
5926 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5927 NULL_RTX, VOIDmode, 0);
5928 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5929 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5930 VOIDmode, 0);
5931 else
5932 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5933 NULL_RTX, VOIDmode, 0);
5934 temp = expand_binop (mode, other_optab, op0, op1, target,
5935 unsignedp, OPTAB_LIB_WIDEN);
5936 htem = expand_mult_highpart_adjust (innermode,
5937 gen_highpart (innermode, temp),
5938 op0, op1,
5939 gen_highpart (innermode, temp),
5940 unsignedp);
5941 emit_move_insn (gen_highpart (innermode, temp), htem);
5942 return temp;
5943 }
5944 }
5945 }
5946 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5947 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5948 return expand_mult (mode, op0, op1, target, unsignedp);
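/* Editorial illustration, not compiled (hence the #if 0): a source
   form the widening-multiply check above looks for.  On a machine
   with a mulhisi-style pattern, the conversions from HImode are not
   emitted; one widening multiply is generated instead.  */
#if 0
int wide_mul (a, b)
     short a, b;
{
  return a * b;		/* both operands are NOP_EXPRs from the same
			   narrower, equally-signed type */
}
#endif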
5949
5950 case TRUNC_DIV_EXPR:
5951 case FLOOR_DIV_EXPR:
5952 case CEIL_DIV_EXPR:
5953 case ROUND_DIV_EXPR:
5954 case EXACT_DIV_EXPR:
5955 preexpand_calls (exp);
5956 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5957 subtarget = 0;
5958 /* Possible optimization: compute the dividend with EXPAND_SUM;
5959 then, if the divisor is constant, we can optimize the case
5960 where some terms of the dividend have coefficients divisible by it. */
5961 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5962 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5963 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5964
5965 case RDIV_EXPR:
5966 this_optab = flodiv_optab;
5967 goto binop;
5968
5969 case TRUNC_MOD_EXPR:
5970 case FLOOR_MOD_EXPR:
5971 case CEIL_MOD_EXPR:
5972 case ROUND_MOD_EXPR:
5973 preexpand_calls (exp);
5974 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5975 subtarget = 0;
5976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5977 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5978 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5979
5980 case FIX_ROUND_EXPR:
5981 case FIX_FLOOR_EXPR:
5982 case FIX_CEIL_EXPR:
5983 abort (); /* Not used for C. */
5984
5985 case FIX_TRUNC_EXPR:
5986 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5987 if (target == 0)
5988 target = gen_reg_rtx (mode);
5989 expand_fix (target, op0, unsignedp);
5990 return target;
5991
5992 case FLOAT_EXPR:
5993 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5994 if (target == 0)
5995 target = gen_reg_rtx (mode);
5996 /* expand_float can't figure out what to do if FROM has VOIDmode.
5997 So give it the correct mode. With -O, cse will optimize this. */
5998 if (GET_MODE (op0) == VOIDmode)
5999 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6000 op0);
6001 expand_float (target, op0,
6002 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6003 return target;
6004
6005 case NEGATE_EXPR:
6006 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6007 temp = expand_unop (mode, neg_optab, op0, target, 0);
6008 if (temp == 0)
6009 abort ();
6010 return temp;
6011
6012 case ABS_EXPR:
6013 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6014
6015 /* Handle complex values specially. */
6016 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6017 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6018 return expand_complex_abs (mode, op0, target, unsignedp);
6019
6020 /* Unsigned abs is simply the operand. Testing here means we don't
6021 risk generating incorrect code below. */
6022 if (TREE_UNSIGNED (type))
6023 return op0;
6024
6025 return expand_abs (mode, op0, target, unsignedp,
6026 safe_from_p (target, TREE_OPERAND (exp, 0)));
6027
6028 case MAX_EXPR:
6029 case MIN_EXPR:
6030 target = original_target;
6031 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6032 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6033 || GET_MODE (target) != mode
6034 || (GET_CODE (target) == REG
6035 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6036 target = gen_reg_rtx (mode);
6037 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6038 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6039
6040 /* First try to do it with a special MIN or MAX instruction.
6041 If that does not win, use a conditional jump to select the proper
6042 value. */
6043 this_optab = (TREE_UNSIGNED (type)
6044 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6045 : (code == MIN_EXPR ? smin_optab : smax_optab));
6046
6047 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6048 OPTAB_WIDEN);
6049 if (temp != 0)
6050 return temp;
6051
6052 /* At this point, a MEM target is no longer useful; we will get better
6053 code without it. */
6054
6055 if (GET_CODE (target) == MEM)
6056 target = gen_reg_rtx (mode);
6057
6058 if (target != op0)
6059 emit_move_insn (target, op0);
6060
6061 op0 = gen_label_rtx ();
6062
6063 /* If this mode is an integer too wide to compare properly,
6064 compare word by word. Rely on cse to optimize constant cases. */
6065 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6066 {
6067 if (code == MAX_EXPR)
6068 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6069 target, op1, NULL_RTX, op0);
6070 else
6071 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6072 op1, target, NULL_RTX, op0);
6073 emit_move_insn (target, op1);
6074 }
6075 else
6076 {
6077 if (code == MAX_EXPR)
6078 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6079 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6080 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6081 else
6082 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6083 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6084 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6085 if (temp == const0_rtx)
6086 emit_move_insn (target, op1);
6087 else if (temp != const_true_rtx)
6088 {
6089 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6090 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6091 else
6092 abort ();
6093 emit_move_insn (target, op1);
6094 }
6095 }
6096 emit_label (op0);
6097 return target;
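/* Editorial sketch, not compiled (hence the #if 0): the
   conditional-jump fallback above, written out as C.  Names are
   hypothetical.  */
#if 0
int max_fallback (a, b)
     int a, b;
{
  int target = a;	/* move op0 into the target */
  if (target >= b)	/* compare_from_rtx + branch to the op0 label */
    goto done;
  target = b;		/* otherwise store op1 */
 done:
  return target;
}
#endif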
6098
6099 case BIT_NOT_EXPR:
6100 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6101 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6102 if (temp == 0)
6103 abort ();
6104 return temp;
6105
6106 case FFS_EXPR:
6107 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6108 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6109 if (temp == 0)
6110 abort ();
6111 return temp;
6112
6113 /* ??? Can optimize bitwise operations with one arg constant.
6114 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6115 and (a bitwise1 b) bitwise2 b (etc)
6116 but that is probably not worthwhile. */
6117
6118 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6119 boolean values when we want in all cases to compute both of them. In
6120 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6121 as actual zero-or-1 values and then bitwise anding. In cases where
6122 there cannot be any side effects, better code would be made by
6123 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6124 how to recognize those cases. */
6125
6126 case TRUTH_AND_EXPR:
6127 case BIT_AND_EXPR:
6128 this_optab = and_optab;
6129 goto binop;
6130
6131 case TRUTH_OR_EXPR:
6132 case BIT_IOR_EXPR:
6133 this_optab = ior_optab;
6134 goto binop;
6135
6136 case TRUTH_XOR_EXPR:
6137 case BIT_XOR_EXPR:
6138 this_optab = xor_optab;
6139 goto binop;
6140
6141 case LSHIFT_EXPR:
6142 case RSHIFT_EXPR:
6143 case LROTATE_EXPR:
6144 case RROTATE_EXPR:
6145 preexpand_calls (exp);
6146 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6147 subtarget = 0;
6148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6149 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6150 unsignedp);
6151
6152 /* Could determine the answer when only additive constants differ. Also,
6153 the addition of one can be handled by changing the condition. */
6154 case LT_EXPR:
6155 case LE_EXPR:
6156 case GT_EXPR:
6157 case GE_EXPR:
6158 case EQ_EXPR:
6159 case NE_EXPR:
6160 preexpand_calls (exp);
6161 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6162 if (temp != 0)
6163 return temp;
6164
6165 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6166 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6167 && original_target
6168 && GET_CODE (original_target) == REG
6169 && (GET_MODE (original_target)
6170 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6171 {
6172 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6173 VOIDmode, 0);
6174
6175 if (temp != original_target)
6176 temp = copy_to_reg (temp);
6177
6178 op1 = gen_label_rtx ();
6179 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6180 GET_MODE (temp), unsignedp, 0);
6181 emit_jump_insn (gen_beq (op1));
6182 emit_move_insn (temp, const1_rtx);
6183 emit_label (op1);
6184 return temp;
6185 }
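/* Editorial sketch, not compiled (hence the #if 0): the "foo != 0"
   expansion above as C.  The value is loaded, and 1 is stored over it
   only when it is nonzero, avoiding a general set-flag insn.  */
#if 0
int ne_zero (foo)
     int foo;
{
  int temp = foo;	/* load foo into the target register */
  if (temp == 0)	/* emit_cmp_insn + gen_beq around the store */
    goto over;
  temp = 1;
 over:
  return temp;
}
#endif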
6186
6187 /* If no set-flag instruction, must generate a conditional
6188 store into a temporary variable. Drop through
6189 and handle this like && and ||. */
6190
6191 case TRUTH_ANDIF_EXPR:
6192 case TRUTH_ORIF_EXPR:
6193 if (! ignore
6194 && (target == 0 || ! safe_from_p (target, exp)
6195 /* Make sure we don't have a hard reg (such as function's return
6196 value) live across basic blocks, if not optimizing. */
6197 || (!optimize && GET_CODE (target) == REG
6198 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6199 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6200
6201 if (target)
6202 emit_clr_insn (target);
6203
6204 op1 = gen_label_rtx ();
6205 jumpifnot (exp, op1);
6206
6207 if (target)
6208 emit_0_to_1_insn (target);
6209
6210 emit_label (op1);
6211 return ignore ? const0_rtx : target;
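/* Editorial sketch, not compiled (hence the #if 0): the jump-based
   expansion above for && and || (and for comparisons when there is
   no set-flag insn).  */
#if 0
int andif (a, b)
     int a, b;
{
  int target = 0;	/* emit_clr_insn */
  if (! (a && b))	/* jumpifnot over the store of 1 */
    goto out;
  target = 1;		/* emit_0_to_1_insn */
 out:
  return target;
}
#endif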
6212
6213 case TRUTH_NOT_EXPR:
6214 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6215 /* The parser is careful to generate TRUTH_NOT_EXPR
6216 only with operands that are always zero or one. */
6217 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6218 target, 1, OPTAB_LIB_WIDEN);
6219 if (temp == 0)
6220 abort ();
6221 return temp;
6222
6223 case COMPOUND_EXPR:
6224 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6225 emit_queue ();
6226 return expand_expr (TREE_OPERAND (exp, 1),
6227 (ignore ? const0_rtx : target),
6228 VOIDmode, 0);
6229
6230 case COND_EXPR:
6231 {
6232 rtx flag = NULL_RTX;
6233 tree left_cleanups = NULL_TREE;
6234 tree right_cleanups = NULL_TREE;
6235
6236 /* Used to save a pointer to the place to put the setting of
6237 the flag that indicates if this side of the conditional was
6238 taken. We backpatch the code if we find out later that we
6239 have any conditional cleanups that need to be performed. */
6240 rtx dest_right_flag = NULL_RTX;
6241 rtx dest_left_flag = NULL_RTX;
6242
6243 /* Note that COND_EXPRs whose type is a structure or union
6244 are required to be constructed to contain assignments of
6245 a temporary variable, so that we can evaluate them here
6246 for side effect only. If type is void, we must do likewise. */
6247
6248 /* If an arm of the branch requires a cleanup,
6249 only that cleanup is performed. */
6250
6251 tree singleton = 0;
6252 tree binary_op = 0, unary_op = 0;
6253 tree old_cleanups = cleanups_this_call;
6254
6255 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6256 convert it to our mode, if necessary. */
6257 if (integer_onep (TREE_OPERAND (exp, 1))
6258 && integer_zerop (TREE_OPERAND (exp, 2))
6259 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6260 {
6261 if (ignore)
6262 {
6263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6264 modifier);
6265 return const0_rtx;
6266 }
6267
6268 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6269 if (GET_MODE (op0) == mode)
6270 return op0;
6271
6272 if (target == 0)
6273 target = gen_reg_rtx (mode);
6274 convert_move (target, op0, unsignedp);
6275 return target;
6276 }
6277
6278 /* If we are not to produce a result, we have no target. Otherwise,
6279 if a target was specified use it; it will not be used as an
6280 intermediate target unless it is safe. If no target, use a
6281 temporary. */
6282
6283 if (ignore)
6284 temp = 0;
6285 else if (original_target
6286 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6287 && GET_MODE (original_target) == mode
6288 && ! (GET_CODE (original_target) == MEM
6289 && MEM_VOLATILE_P (original_target)))
6290 temp = original_target;
6291 else
6292 temp = assign_temp (type, 0, 0, 1);
6293
6294 /* Check for X ? A + B : A. If we have this, we can copy
6295 A to the output and conditionally add B. Similarly for unary
6296 operations. Don't do this if X has side-effects because
6297 those side effects might affect A or B and the "?" operation is
6298 a sequence point in ANSI. (We test for side effects later.) */
6299
6300 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6301 && operand_equal_p (TREE_OPERAND (exp, 2),
6302 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6303 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6304 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6305 && operand_equal_p (TREE_OPERAND (exp, 1),
6306 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6307 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6308 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6309 && operand_equal_p (TREE_OPERAND (exp, 2),
6310 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6311 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6312 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6313 && operand_equal_p (TREE_OPERAND (exp, 1),
6314 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6315 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6316
6317 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6318 operation, do this as A + (X != 0). Similarly for other simple
6319 binary operators. */
6320 if (temp && singleton && binary_op
6321 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6322 && (TREE_CODE (binary_op) == PLUS_EXPR
6323 || TREE_CODE (binary_op) == MINUS_EXPR
6324 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6325 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6326 && integer_onep (TREE_OPERAND (binary_op, 1))
6327 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6328 {
6329 rtx result;
6330 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6331 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6332 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6333 : xor_optab);
6334
6335 /* If we had X ? A : A + 1, do this as A + (X == 0).
6336
6337 We have to invert the truth value here and then put it
6338 back later if do_store_flag fails. We cannot simply copy
6339 TREE_OPERAND (exp, 0) to another variable and modify that
6340 because invert_truthvalue can modify the tree pointed to
6341 by its argument. */
6342 if (singleton == TREE_OPERAND (exp, 1))
6343 TREE_OPERAND (exp, 0)
6344 = invert_truthvalue (TREE_OPERAND (exp, 0));
6345
6346 result = do_store_flag (TREE_OPERAND (exp, 0),
6347 (safe_from_p (temp, singleton)
6348 ? temp : NULL_RTX),
6349 mode, BRANCH_COST <= 1);
6350
6351 if (result)
6352 {
6353 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6354 return expand_binop (mode, boptab, op1, result, temp,
6355 unsignedp, OPTAB_LIB_WIDEN);
6356 }
6357 else if (singleton == TREE_OPERAND (exp, 1))
6358 TREE_OPERAND (exp, 0)
6359 = invert_truthvalue (TREE_OPERAND (exp, 0));
6360 }
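/* Editorial illustration, not compiled (hence the #if 0): the
   store-flag rewrite above.  X ? A + 1 : A becomes A + (X != 0); for
   X ? A : A + 1 the condition is inverted first.  Names are
   hypothetical.  */
#if 0
int cond_add (x, a)
     int x, a;
{
  return x ? a + 1 : a;	/* expanded branch-free as a + (x != 0)
			   when do_store_flag succeeds */
}
#endif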
6361
6362 do_pending_stack_adjust ();
6363 NO_DEFER_POP;
6364 op0 = gen_label_rtx ();
6365
6366 flag = gen_reg_rtx (word_mode);
6367 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6368 {
6369 if (temp != 0)
6370 {
6371 /* If the target conflicts with the other operand of the
6372 binary op, we can't use it. Also, we can't use the target
6373 if it is a hard register, because evaluating the condition
6374 might clobber it. */
6375 if ((binary_op
6376 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6377 || (GET_CODE (temp) == REG
6378 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6379 temp = gen_reg_rtx (mode);
6380 store_expr (singleton, temp, 0);
6381 }
6382 else
6383 expand_expr (singleton,
6384 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6385 dest_left_flag = get_last_insn ();
6386 if (singleton == TREE_OPERAND (exp, 1))
6387 jumpif (TREE_OPERAND (exp, 0), op0);
6388 else
6389 jumpifnot (TREE_OPERAND (exp, 0), op0);
6390
6391 /* Allows cleanups up to here. */
6392 old_cleanups = cleanups_this_call;
6393 if (binary_op && temp == 0)
6394 /* Just touch the other operand. */
6395 expand_expr (TREE_OPERAND (binary_op, 1),
6396 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6397 else if (binary_op)
6398 store_expr (build (TREE_CODE (binary_op), type,
6399 make_tree (type, temp),
6400 TREE_OPERAND (binary_op, 1)),
6401 temp, 0);
6402 else
6403 store_expr (build1 (TREE_CODE (unary_op), type,
6404 make_tree (type, temp)),
6405 temp, 0);
6406 op1 = op0;
6407 dest_right_flag = get_last_insn ();
6408 }
6409 #if 0
6410 /* This is now done in jump.c and is better done there because it
6411 produces shorter register lifetimes. */
6412
6413 /* Check for both possibilities either constants or variables
6414 in registers (but not the same as the target!). If so, can
6415 save branches by assigning one, branching, and assigning the
6416 other. */
6417 else if (temp && GET_MODE (temp) != BLKmode
6418 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6419 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6420 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6421 && DECL_RTL (TREE_OPERAND (exp, 1))
6422 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6423 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6424 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6425 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6426 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6427 && DECL_RTL (TREE_OPERAND (exp, 2))
6428 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6429 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6430 {
6431 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6432 temp = gen_reg_rtx (mode);
6433 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6434 dest_left_flag = get_last_insn ();
6435 jumpifnot (TREE_OPERAND (exp, 0), op0);
6436
6437 /* Allows cleanups up to here. */
6438 old_cleanups = cleanups_this_call;
6439 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6440 op1 = op0;
6441 dest_right_flag = get_last_insn ();
6442 }
6443 #endif
6444 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6445 comparison operator. If we have one of these cases, set the
6446 output to A, branch on A (cse will merge these two references),
6447 then set the output to FOO. */
6448 else if (temp
6449 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6450 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6451 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6452 TREE_OPERAND (exp, 1), 0)
6453 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6454 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6455 {
6456 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6457 temp = gen_reg_rtx (mode);
6458 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6459 dest_left_flag = get_last_insn ();
6460 jumpif (TREE_OPERAND (exp, 0), op0);
6461
6462 /* Allows cleanups up to here. */
6463 old_cleanups = cleanups_this_call;
6464 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6465 op1 = op0;
6466 dest_right_flag = get_last_insn ();
6467 }
6468 else if (temp
6469 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6470 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6471 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6472 TREE_OPERAND (exp, 2), 0)
6473 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6474 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6475 {
6476 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6477 temp = gen_reg_rtx (mode);
6478 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6479 dest_left_flag = get_last_insn ();
6480 jumpifnot (TREE_OPERAND (exp, 0), op0);
6481
6482 /* Allows cleanups up to here. */
6483 old_cleanups = cleanups_this_call;
6484 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6485 op1 = op0;
6486 dest_right_flag = get_last_insn ();
6487 }
6488 else
6489 {
6490 op1 = gen_label_rtx ();
6491 jumpifnot (TREE_OPERAND (exp, 0), op0);
6492
6493 /* Allows cleanups up to here. */
6494 old_cleanups = cleanups_this_call;
6495 if (temp != 0)
6496 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6497 else
6498 expand_expr (TREE_OPERAND (exp, 1),
6499 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6500 dest_left_flag = get_last_insn ();
6501
6502 /* Handle conditional cleanups, if any. */
6503 left_cleanups = defer_cleanups_to (old_cleanups);
6504
6505 emit_queue ();
6506 emit_jump_insn (gen_jump (op1));
6507 emit_barrier ();
6508 emit_label (op0);
6509 if (temp != 0)
6510 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6511 else
6512 expand_expr (TREE_OPERAND (exp, 2),
6513 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6514 dest_right_flag = get_last_insn ();
6515 }
6516
6517 /* Handle conditional cleanups, if any. */
6518 right_cleanups = defer_cleanups_to (old_cleanups);
6519
6520 emit_queue ();
6521 emit_label (op1);
6522 OK_DEFER_POP;
6523
6524 /* Add back in, any conditional cleanups. */
6525 if (left_cleanups || right_cleanups)
6526 {
6527 tree new_cleanups;
6528 tree cond;
6529 rtx last;
6530
6531 /* Now that we know that a flag is needed, go back and add in the
6532 setting of the flag. */
6533
6534 /* Do the left side flag. */
6535 last = get_last_insn ();
6536 /* Flag left cleanups as needed. */
6537 emit_move_insn (flag, const1_rtx);
6538 /* ??? deprecated, use sequences instead. */
6539 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6540
6541 /* Do the right side flag. */
6542 last = get_last_insn ();
6543 /* Flag right cleanups as needed. */
6544 emit_move_insn (flag, const0_rtx);
6545 /* ??? deprecated, use sequences instead. */
6546 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6547
6548 /* All cleanups must be on the function_obstack. */
6549 push_obstacks_nochange ();
6550 resume_temporary_allocation ();
6551
6552 /* Convert FLAG, which is an rtx, into a tree. */
6553 cond = make_node (RTL_EXPR);
6554 TREE_TYPE (cond) = integer_type_node;
6555 RTL_EXPR_RTL (cond) = flag;
6556 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6557 cond = save_expr (cond);
6558
6559 if (! left_cleanups)
6560 left_cleanups = integer_zero_node;
6561 if (! right_cleanups)
6562 right_cleanups = integer_zero_node;
6563 new_cleanups = build (COND_EXPR, void_type_node,
6564 truthvalue_conversion (cond),
6565 left_cleanups, right_cleanups);
6566 new_cleanups = fold (new_cleanups);
6567
6568 pop_obstacks ();
6569
6570 /* Now add in the conditionalized cleanups. */
6571 cleanups_this_call
6572 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6573 (*interim_eh_hook) (NULL_TREE);
6574 }
6575 return temp;
6576 }
6577
6578 case TARGET_EXPR:
6579 {
6580 int need_exception_region = 0;
6581 /* Something needs to be initialized, but we didn't know
6582 where that thing was when building the tree. For example,
6583 it could be the return value of a function, or a parameter
6584 to a function which is laid out on the stack, or a temporary
6585 variable which must be passed by reference.
6586
6587 We guarantee that the expression will either be constructed
6588 or copied into our original target. */
6589
6590 tree slot = TREE_OPERAND (exp, 0);
6591 tree exp1;
6592 rtx temp;
6593
6594 if (TREE_CODE (slot) != VAR_DECL)
6595 abort ();
6596
6597 if (! ignore)
6598 target = original_target;
6599
6600 if (target == 0)
6601 {
6602 if (DECL_RTL (slot) != 0)
6603 {
6604 target = DECL_RTL (slot);
6605 /* We have already expanded the slot, so don't do
6606 it again. (mrs) */
6607 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6608 return target;
6609 }
6610 else
6611 {
6612 target = assign_temp (type, 2, 1, 1);
6613 /* All temp slots at this level must not conflict. */
6614 preserve_temp_slots (target);
6615 DECL_RTL (slot) = target;
6616
6617 /* Since SLOT is not known to the called function
6618 to belong to its stack frame, we must build an explicit
6619 cleanup. This case occurs when we must build up a reference
6620 to pass as an argument. In this case,
6621 it is very likely that such a reference need not be
6622 built here. */
6623
6624 if (TREE_OPERAND (exp, 2) == 0)
6625 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6626 if (TREE_OPERAND (exp, 2))
6627 {
6628 cleanups_this_call = tree_cons (NULL_TREE,
6629 TREE_OPERAND (exp, 2),
6630 cleanups_this_call);
6631 need_exception_region = 1;
6632 }
6633 }
6634 }
6635 else
6636 {
6637 /* This case does occur when expanding a parameter which
6638 needs to be constructed on the stack. The target
6639 is the actual stack address that we want to initialize.
6640 The function we call will perform the cleanup in this case. */
6641
6642 /* If we have already assigned it space, use that space,
6643 not the target that we were passed in, as our target
6644 parameter is only a hint. */
6645 if (DECL_RTL (slot) != 0)
6646 {
6647 target = DECL_RTL (slot);
6648 /* We have already expanded the slot, so don't do
6649 it again. (mrs) */
6650 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6651 return target;
6652 }
6653
6654 DECL_RTL (slot) = target;
6655 }
6656
6657 exp1 = TREE_OPERAND (exp, 1);
6658 /* Mark it as expanded. */
6659 TREE_OPERAND (exp, 1) = NULL_TREE;
6660
6661 store_expr (exp1, target, 0);
6662
6663 if (need_exception_region)
6664 (*interim_eh_hook) (NULL_TREE);
6665
6666 return target;
6667 }
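/* Editorial illustration, not compiled (hence the #if 0): a construct
   of the kind a TARGET_EXPR describes.  Initializing S directly from
   the call lets the return value be built in S's own slot rather than
   copied.  Types and names are hypothetical.  */
#if 0
struct S { int v[8]; };
extern struct S make_s ();
void use ()
{
  struct S s = make_s ();	/* the TARGET_EXPR slot is s itself */
}
#endif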
6668
6669 case INIT_EXPR:
6670 {
6671 tree lhs = TREE_OPERAND (exp, 0);
6672 tree rhs = TREE_OPERAND (exp, 1);
6673 tree noncopied_parts = 0;
6674 tree lhs_type = TREE_TYPE (lhs);
6675
6676 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6677 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6678 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6679 TYPE_NONCOPIED_PARTS (lhs_type));
6680 while (noncopied_parts != 0)
6681 {
6682 expand_assignment (TREE_VALUE (noncopied_parts),
6683 TREE_PURPOSE (noncopied_parts), 0, 0);
6684 noncopied_parts = TREE_CHAIN (noncopied_parts);
6685 }
6686 return temp;
6687 }
6688
6689 case MODIFY_EXPR:
6690 {
6691 /* If lhs is complex, expand calls in rhs before computing it.
6692 That's so we don't compute a pointer and save it over a call.
6693 If lhs is simple, compute it first so we can give it as a
6694 target if the rhs is just a call. This avoids an extra temp and copy,
6695 and prevents a partial subsumption that makes bad code.
6696 Actually we could treat component_ref's of vars like vars. */
6697
6698 tree lhs = TREE_OPERAND (exp, 0);
6699 tree rhs = TREE_OPERAND (exp, 1);
6700 tree noncopied_parts = 0;
6701 tree lhs_type = TREE_TYPE (lhs);
6702
6703 temp = 0;
6704
6705 if (TREE_CODE (lhs) != VAR_DECL
6706 && TREE_CODE (lhs) != RESULT_DECL
6707 && TREE_CODE (lhs) != PARM_DECL)
6708 preexpand_calls (exp);
6709
6710 /* Check for |= or &= of a bitfield of size one into another bitfield
6711 of size one. In this case, unless we need the result of the
6712 assignment, we can do this more efficiently with a
6713 test followed by an assignment, if necessary.
6714
6715 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6716 things change so we do, this code should be enhanced to
6717 support it. */
6718 if (ignore
6719 && TREE_CODE (lhs) == COMPONENT_REF
6720 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6721 || TREE_CODE (rhs) == BIT_AND_EXPR)
6722 && TREE_OPERAND (rhs, 0) == lhs
6723 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6724 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6725 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6726 {
6727 rtx label = gen_label_rtx ();
6728
6729 do_jump (TREE_OPERAND (rhs, 1),
6730 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6731 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6732 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6733 (TREE_CODE (rhs) == BIT_IOR_EXPR
6734 ? integer_one_node
6735 : integer_zero_node)),
6736 0, 0);
6737 do_pending_stack_adjust ();
6738 emit_label (label);
6739 return const0_rtx;
6740 }
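/* Editorial sketch, not compiled (hence the #if 0): the
   test-and-assign rewrite above for one-bit fields.  The struct is
   hypothetical.  */
#if 0
struct bits { unsigned a : 1, b : 1; };
void or_bit (p)
     struct bits *p;
{
  /* p->a |= p->b; is emitted as a jump around a store of 1: */
  if (p->b)
    p->a = 1;
}
#endif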
6741
6742 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6743 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6744 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6745 TYPE_NONCOPIED_PARTS (lhs_type));
6746
6747 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6748 while (noncopied_parts != 0)
6749 {
6750 expand_assignment (TREE_PURPOSE (noncopied_parts),
6751 TREE_VALUE (noncopied_parts), 0, 0);
6752 noncopied_parts = TREE_CHAIN (noncopied_parts);
6753 }
6754 return temp;
6755 }
6756
6757 case PREINCREMENT_EXPR:
6758 case PREDECREMENT_EXPR:
6759 return expand_increment (exp, 0);
6760
6761 case POSTINCREMENT_EXPR:
6762 case POSTDECREMENT_EXPR:
6763 /* Faster to treat as pre-increment if result is not used. */
6764 return expand_increment (exp, ! ignore);
6765
6766 case ADDR_EXPR:
6767 /* If nonzero, TEMP will be set to the address of something that might
6768 be a MEM corresponding to a stack slot. */
6769 temp = 0;
6770
6771 /* Are we taking the address of a nested function? */
6772 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6773 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6774 {
6775 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6776 op0 = force_operand (op0, target);
6777 }
6778 /* If we are taking the address of something erroneous, just
6779 return a zero. */
6780 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6781 return const0_rtx;
6782 else
6783 {
6784 /* We make sure to pass const0_rtx down if we came in with
6785 ignore set, to avoid doing the cleanups twice. */
6786 op0 = expand_expr (TREE_OPERAND (exp, 0),
6787 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6788 (modifier == EXPAND_INITIALIZER
6789 ? modifier : EXPAND_CONST_ADDRESS));
6790
6791 /* If we are going to ignore the result, OP0 will have been set
6792 to const0_rtx, so just return it. Don't get confused and
6793 think we are taking the address of the constant. */
6794 if (ignore)
6795 return op0;
6796
6797 op0 = protect_from_queue (op0, 0);
6798
6799 /* We would like the object in memory. If it is a constant,
6800 we can have it statically allocated in memory. For
6801 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6802 memory and store the value into it. */
6803
6804 if (CONSTANT_P (op0))
6805 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6806 op0);
6807 else if (GET_CODE (op0) == MEM)
6808 {
6809 mark_temp_addr_taken (op0);
6810 temp = XEXP (op0, 0);
6811 }
6812
6813 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6814 || GET_CODE (op0) == CONCAT)
6815 {
6816 /* If this object is in a register, it must not
6817 be BLKmode. */
6818 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6819 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6820
6821 mark_temp_addr_taken (memloc);
6822 emit_move_insn (memloc, op0);
6823 op0 = memloc;
6824 }
6825
6826 if (GET_CODE (op0) != MEM)
6827 abort ();
6828
6829 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6830 {
6831 temp = XEXP (op0, 0);
6832 #ifdef POINTERS_EXTEND_UNSIGNED
6833 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6834 && mode == ptr_mode)
6835 temp = convert_memory_address (ptr_mode, temp);
6836 #endif
6837 return temp;
6838 }
6839
6840 op0 = force_operand (XEXP (op0, 0), target);
6841 }
6842
6843 if (flag_force_addr && GET_CODE (op0) != REG)
6844 op0 = force_reg (Pmode, op0);
6845
6846 if (GET_CODE (op0) == REG
6847 && ! REG_USERVAR_P (op0))
6848 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
6849
6850 /* If we might have had a temp slot, add an equivalent address
6851 for it. */
6852 if (temp != 0)
6853 update_temp_slot_address (temp, op0);
6854
6855 #ifdef POINTERS_EXTEND_UNSIGNED
6856 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6857 && mode == ptr_mode)
6858 op0 = convert_memory_address (ptr_mode, op0);
6859 #endif
6860
6861 return op0;
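/* Editorial sketch, not compiled (hence the #if 0): why a REG operand
   is spilled above.  Taking the address of a value that lives only in
   a register requires copying it to memory whose address can be
   returned; assign_temp provides that memory.  */
#if 0
int deref_of_addr (x)
     int x;
{
  return *&x;		/* &x forces x into a stack slot (memloc);
			   the ADDR_EXPR yields that slot's address */
}
#endif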
6862
6863 case ENTRY_VALUE_EXPR:
6864 abort ();
6865
6866 /* COMPLEX type for Extended Pascal & Fortran. */
6867 case COMPLEX_EXPR:
6868 {
6869 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6870 rtx insns;
6871
6872 /* Get the rtx for each of the operands. */
6873 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6874 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6875
6876 if (! target)
6877 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6878
6879 start_sequence ();
6880
6881 /* Move the real (op0) and imaginary (op1) parts to their location. */
6882 emit_move_insn (gen_realpart (mode, target), op0);
6883 emit_move_insn (gen_imagpart (mode, target), op1);
6884
6885 insns = get_insns ();
6886 end_sequence ();
6887
6888 /* Complex construction should appear as a single unit. */
6889 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6890 each with a separate pseudo as destination.
6891 It's not correct for flow to treat them as a unit. */
6892 if (GET_CODE (target) != CONCAT)
6893 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6894 else
6895 emit_insns (insns);
6896
6897 return target;
6898 }
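/* Editorial illustration, not compiled (hence the #if 0): the shape of
   what the COMPLEX_EXPR expansion produces.  A complex value is built
   by two part-moves, grouped by emit_no_conflict_block so flow treats
   the construction as one unit.  The struct stands in for a complex
   mode and is hypothetical.  */
#if 0
struct dcomplex { double re, im; };
struct dcomplex make_complex (r, i)
     double r, i;
{
  struct dcomplex target;
  target.re = r;	/* emit_move_insn (gen_realpart (...), op0) */
  target.im = i;	/* emit_move_insn (gen_imagpart (...), op1) */
  return target;
}
#endif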
6899
6900 case REALPART_EXPR:
6901 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6902 return gen_realpart (mode, op0);
6903
6904 case IMAGPART_EXPR:
6905 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6906 return gen_imagpart (mode, op0);
6907
6908 case CONJ_EXPR:
6909 {
6910 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6911 rtx imag_t;
6912 rtx insns;
6913
6914 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6915
6916 if (! target)
6917 target = gen_reg_rtx (mode);
6918
6919 start_sequence ();
6920
6921 /* Store the realpart and the negated imagpart to target. */
6922 emit_move_insn (gen_realpart (partmode, target),
6923 gen_realpart (partmode, op0));
6924
6925 imag_t = gen_imagpart (partmode, target);
6926 temp = expand_unop (partmode, neg_optab,
6927 gen_imagpart (partmode, op0), imag_t, 0);
6928 if (temp != imag_t)
6929 emit_move_insn (imag_t, temp);
6930
6931 insns = get_insns ();
6932 end_sequence ();
6933
6934 /* Conjugate should appear as a single unit.
6935 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6936 each with a separate pseudo as destination.
6937 It's not correct for flow to treat them as a unit. */
6938 if (GET_CODE (target) != CONCAT)
6939 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6940 else
6941 emit_insns (insns);
6942
6943 return target;
6944 }
6945
6946 case ERROR_MARK:
6947 op0 = CONST0_RTX (tmode);
6948 if (op0 != 0)
6949 return op0;
6950 return const0_rtx;
6951
6952 default:
6953 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6954 }
6955
6956 /* Here to do an ordinary binary operator, generating an instruction
6957 from the optab already placed in `this_optab'. */
6958 binop:
6959 preexpand_calls (exp);
6960 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6961 subtarget = 0;
6962 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6963 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6964 binop2:
6965 temp = expand_binop (mode, this_optab, op0, op1, target,
6966 unsignedp, OPTAB_LIB_WIDEN);
6967 if (temp == 0)
6968 abort ();
6969 return temp;
6970 }
6971
6972
6973 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6974 void
6975 bc_expand_expr (exp)
6976 tree exp;
6977 {
6978 enum tree_code code;
6979 tree type, arg0;
6980 rtx r;
6981 struct binary_operator *binoptab;
6982 struct unary_operator *unoptab;
6983 struct increment_operator *incroptab;
6984 struct bc_label *lab, *lab1;
6985 enum bytecode_opcode opcode;
6986
6987
6988 code = TREE_CODE (exp);
6989
6990 switch (code)
6991 {
6992 case PARM_DECL:
6993
6994 if (DECL_RTL (exp) == 0)
6995 {
6996 error_with_decl (exp, "prior parameter's size depends on `%s'");
6997 return;
6998 }
6999
7000 bc_load_parmaddr (DECL_RTL (exp));
7001 bc_load_memory (TREE_TYPE (exp), exp);
7002
7003 return;
7004
7005 case VAR_DECL:
7006
7007 if (DECL_RTL (exp) == 0)
7008 abort ();
7009
7010 #if 0
7011 if (BYTECODE_LABEL (DECL_RTL (exp)))
7012 bc_load_externaddr (DECL_RTL (exp));
7013 else
7014 bc_load_localaddr (DECL_RTL (exp));
7015 #endif
7016 if (TREE_PUBLIC (exp))
7017 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7018 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7019 else
7020 bc_load_localaddr (DECL_RTL (exp));
7021
7022 bc_load_memory (TREE_TYPE (exp), exp);
7023 return;
7024
7025 case INTEGER_CST:
7026
7027 #ifdef DEBUG_PRINT_CODE
7028 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7029 #endif
7030 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7031 ? SImode
7032 : TYPE_MODE (TREE_TYPE (exp)))],
7033 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7034 return;
7035
7036 case REAL_CST:
7037
7038 #if 0
7039 #ifdef DEBUG_PRINT_CODE
7040 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7041 #endif
7042 /* FIX THIS: find a better way to pass real_cst's. -bson */
7043 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7044 (double) TREE_REAL_CST (exp));
7045 #else
7046 abort ();
7047 #endif
7048
7049 return;
7050
7051 case CALL_EXPR:
7052
7053 /* We build a call description vector describing the type of
7054 the return value and of the arguments; this call vector,
7055 together with a pointer to a location for the return value
7056 and the base of the argument list, is passed to the low
7057 level machine dependent call subroutine, which is responsible
7058 for putting the arguments wherever real functions expect
7059 them, as well as getting the return value back. */
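/* Editorial sketch of the call description vector built below; from
   the cons order, the final layout is

       { nargs,
         return_type_code, return_size,
         arg1_type_code, arg1_size,
         ...
         argN_type_code, argN_size } */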
7060 {
7061 tree calldesc = 0, arg;
7062 int nargs = 0, i;
7063 rtx retval;
7064
7065 /* Push the evaluated args on the evaluation stack in reverse
7066 order. Also make an entry for each arg in the calldesc
7067 vector while we're at it. */
7068
7069 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7070
7071 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7072 {
7073 ++nargs;
7074 bc_expand_expr (TREE_VALUE (arg));
7075
7076 calldesc = tree_cons ((tree) 0,
7077 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7078 calldesc);
7079 calldesc = tree_cons ((tree) 0,
7080 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7081 calldesc);
7082 }
7083
7084 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7085
7086 /* Allocate a location for the return value and push its
7087 address on the evaluation stack. Also make an entry
7088 at the front of the calldesc for the return value type. */
7089
7090 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7091 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7092 bc_load_localaddr (retval);
7093
7094 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7095 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7096
7097 /* Prepend the argument count. */
7098 calldesc = tree_cons ((tree) 0,
7099 build_int_2 (nargs, 0),
7100 calldesc);
7101
7102 /* Push the address of the call description vector on the stack. */
7103 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7104 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7105 build_index_type (build_int_2 (nargs * 2, 0)));
7106 r = output_constant_def (calldesc);
7107 bc_load_externaddr (r);
7108
7109 /* Push the address of the function to be called. */
7110 bc_expand_expr (TREE_OPERAND (exp, 0));
7111
7112 /* Call the function, popping its address and the calldesc vector
7113 address off the evaluation stack in the process. */
7114 bc_emit_instruction (call);
7115
7116 /* Pop the arguments off the stack. */
7117 bc_adjust_stack (nargs);
7118
7119 /* Load the return value onto the stack. */
7120 bc_load_localaddr (retval);
7121 bc_load_memory (type, TREE_OPERAND (exp, 0));
7122 }
7123 return;
7124
7125 case SAVE_EXPR:
7126
7127 if (!SAVE_EXPR_RTL (exp))
7128 {
7129 /* First time around: copy to a local variable. */
7130 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7131 TYPE_ALIGN (TREE_TYPE(exp)));
7132 bc_expand_expr (TREE_OPERAND (exp, 0));
7133 bc_emit_instruction (duplicate);
7134
7135 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7136 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7137 }
7138 else
7139 {
7140 /* Subsequent references: use the saved copy. */
7141 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7142 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7143 }
7144 return;
7145
7146 #if 0
7147 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7148 how are they handled instead? */
7149 case LET_STMT:
7150
7151 TREE_USED (exp) = 1;
7152 bc_expand_expr (STMT_BODY (exp));
7153 return;
7154 #endif
7155
7156 case NOP_EXPR:
7157 case CONVERT_EXPR:
7158
7159 bc_expand_expr (TREE_OPERAND (exp, 0));
7160 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7161 return;
7162
7163 case MODIFY_EXPR:
7164
7165 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7166 return;
7167
7168 case ADDR_EXPR:
7169
7170 bc_expand_address (TREE_OPERAND (exp, 0));
7171 return;
7172
7173 case INDIRECT_REF:
7174
7175 bc_expand_expr (TREE_OPERAND (exp, 0));
7176 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7177 return;
7178
7179 case ARRAY_REF:
7180
7181 bc_expand_expr (bc_canonicalize_array_ref (exp));
7182 return;
7183
7184 case COMPONENT_REF:
7185
7186 bc_expand_component_address (exp);
7187
7188 /* If we have a bitfield, generate a proper load. */
7189 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7190 return;
7191
7192 case COMPOUND_EXPR:
7193
7194 bc_expand_expr (TREE_OPERAND (exp, 0));
7195 bc_emit_instruction (drop);
7196 bc_expand_expr (TREE_OPERAND (exp, 1));
7197 return;
7198
7199 case COND_EXPR:
7200
7201 bc_expand_expr (TREE_OPERAND (exp, 0));
7202 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7203 lab = bc_get_bytecode_label ();
7204 bc_emit_bytecode (xjumpifnot);
7205 bc_emit_bytecode_labelref (lab);
7206
7207 #ifdef DEBUG_PRINT_CODE
7208 fputc ('\n', stderr);
7209 #endif
7210 bc_expand_expr (TREE_OPERAND (exp, 1));
7211 lab1 = bc_get_bytecode_label ();
7212 bc_emit_bytecode (jump);
7213 bc_emit_bytecode_labelref (lab1);
7214
7215 #ifdef DEBUG_PRINT_CODE
7216 fputc ('\n', stderr);
7217 #endif
7218
7219 bc_emit_bytecode_labeldef (lab);
7220 bc_expand_expr (TREE_OPERAND (exp, 2));
7221 bc_emit_bytecode_labeldef (lab1);
7222 return;
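/* Editorial sketch of the bytecode emitted above for A ? B : C, with
   LAB and LAB1 as in the code:

       <A> xjumpifnot LAB; <B> jump LAB1; LAB: <C> LAB1: */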
7223
7224 case TRUTH_ANDIF_EXPR:
7225
7226 opcode = xjumpifnot;
7227 goto andorif;
7228
7229 case TRUTH_ORIF_EXPR:
7230
7231 opcode = xjumpif;
7232 goto andorif;
7233
7234 case PLUS_EXPR:
7235
7236 binoptab = optab_plus_expr;
7237 goto binop;
7238
7239 case MINUS_EXPR:
7240
7241 binoptab = optab_minus_expr;
7242 goto binop;
7243
7244 case MULT_EXPR:
7245
7246 binoptab = optab_mult_expr;
7247 goto binop;
7248
7249 case TRUNC_DIV_EXPR:
7250 case FLOOR_DIV_EXPR:
7251 case CEIL_DIV_EXPR:
7252 case ROUND_DIV_EXPR:
7253 case EXACT_DIV_EXPR:
7254
7255 binoptab = optab_trunc_div_expr;
7256 goto binop;
7257
7258 case TRUNC_MOD_EXPR:
7259 case FLOOR_MOD_EXPR:
7260 case CEIL_MOD_EXPR:
7261 case ROUND_MOD_EXPR:
7262
7263 binoptab = optab_trunc_mod_expr;
7264 goto binop;
7265
7266 case FIX_ROUND_EXPR:
7267 case FIX_FLOOR_EXPR:
7268 case FIX_CEIL_EXPR:
7269 abort (); /* Not used for C. */
7270
7271 case FIX_TRUNC_EXPR:
7272 case FLOAT_EXPR:
7273 case MAX_EXPR:
7274 case MIN_EXPR:
7275 case FFS_EXPR:
7276 case LROTATE_EXPR:
7277 case RROTATE_EXPR:
7278 abort (); /* FIXME */
7279
7280 case RDIV_EXPR:
7281
7282 binoptab = optab_rdiv_expr;
7283 goto binop;
7284
7285 case BIT_AND_EXPR:
7286
7287 binoptab = optab_bit_and_expr;
7288 goto binop;
7289
7290 case BIT_IOR_EXPR:
7291
7292 binoptab = optab_bit_ior_expr;
7293 goto binop;
7294
7295 case BIT_XOR_EXPR:
7296
7297 binoptab = optab_bit_xor_expr;
7298 goto binop;
7299
7300 case LSHIFT_EXPR:
7301
7302 binoptab = optab_lshift_expr;
7303 goto binop;
7304
7305 case RSHIFT_EXPR:
7306
7307 binoptab = optab_rshift_expr;
7308 goto binop;
7309
7310 case TRUTH_AND_EXPR:
7311
7312 binoptab = optab_truth_and_expr;
7313 goto binop;
7314
7315 case TRUTH_OR_EXPR:
7316
7317 binoptab = optab_truth_or_expr;
7318 goto binop;
7319
7320 case LT_EXPR:
7321
7322 binoptab = optab_lt_expr;
7323 goto binop;
7324
7325 case LE_EXPR:
7326
7327 binoptab = optab_le_expr;
7328 goto binop;
7329
7330 case GE_EXPR:
7331
7332 binoptab = optab_ge_expr;
7333 goto binop;
7334
7335 case GT_EXPR:
7336
7337 binoptab = optab_gt_expr;
7338 goto binop;
7339
7340 case EQ_EXPR:
7341
7342 binoptab = optab_eq_expr;
7343 goto binop;
7344
7345 case NE_EXPR:
7346
7347 binoptab = optab_ne_expr;
7348 goto binop;
7349
7350 case NEGATE_EXPR:
7351
7352 unoptab = optab_negate_expr;
7353 goto unop;
7354
7355 case BIT_NOT_EXPR:
7356
7357 unoptab = optab_bit_not_expr;
7358 goto unop;
7359
7360 case TRUTH_NOT_EXPR:
7361
7362 unoptab = optab_truth_not_expr;
7363 goto unop;
7364
7365 case PREDECREMENT_EXPR:
7366
7367 incroptab = optab_predecrement_expr;
7368 goto increment;
7369
7370 case PREINCREMENT_EXPR:
7371
7372 incroptab = optab_preincrement_expr;
7373 goto increment;
7374
7375 case POSTDECREMENT_EXPR:
7376
7377 incroptab = optab_postdecrement_expr;
7378 goto increment;
7379
7380 case POSTINCREMENT_EXPR:
7381
7382 incroptab = optab_postincrement_expr;
7383 goto increment;
7384
7385 case CONSTRUCTOR:
7386
7387 bc_expand_constructor (exp);
7388 return;
7389
7390 case ERROR_MARK:
7391 case RTL_EXPR:
7392
7393 return;
7394
7395 case BIND_EXPR:
7396 {
7397 tree vars = TREE_OPERAND (exp, 0);
7398 int vars_need_expansion = 0;
7399
7400 /* Need to open a binding contour here because
7401 if there are any cleanups they must be contained here. */
7402 expand_start_bindings (0);
7403
7404 /* Mark the corresponding BLOCK for output. */
7405 if (TREE_OPERAND (exp, 2) != 0)
7406 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7407
7408 /* If VARS have not yet been expanded, expand them now. */
7409 while (vars)
7410 {
7411 if (DECL_RTL (vars) == 0)
7412 {
7413 vars_need_expansion = 1;
7414 expand_decl (vars);
7415 }
7416 expand_decl_init (vars);
7417 vars = TREE_CHAIN (vars);
7418 }
7419
7420 bc_expand_expr (TREE_OPERAND (exp, 1));
7421
7422 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7423
7424 return;
7425 }
7426 }
7427
7428 abort ();
7429
7430 binop:
7431
7432 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7433 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7434 return;
7435
7436
7437 unop:
7438
7439 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7440 return;
7441
7442
7443 andorif:
7444
7445 bc_expand_expr (TREE_OPERAND (exp, 0));
7446 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7447 lab = bc_get_bytecode_label ();
7448
7449 bc_emit_instruction (duplicate);
7450 bc_emit_bytecode (opcode);
7451 bc_emit_bytecode_labelref (lab);
7452
7453 #ifdef DEBUG_PRINT_CODE
7454 fputc ('\n', stderr);
7455 #endif
7456
7457 bc_emit_instruction (drop);
7458
7459 bc_expand_expr (TREE_OPERAND (exp, 1));
7460 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7461 bc_emit_bytecode_labeldef (lab);
7462 return;
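/* Editorial sketch of the bytecode emitted above for A && B (A || B
   is identical except that OPCODE is xjumpif):

       <A> duplicate; xjumpifnot LAB; drop; <B> LAB:

   so the copy of A left on the stack becomes the result when the
   jump short-circuits the evaluation of B. */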
7463
7464
7465 increment:
7466
7467 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7468
7469 /* Push the quantum. */
7470 bc_expand_expr (TREE_OPERAND (exp, 1));
7471
7472 /* Convert it to the lvalue's type. */
7473 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7474
7475 /* Push the address of the lvalue. */
7476 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7477
7478 /* Perform the actual increment. */
7479 bc_expand_increment (incroptab, type);
7480 return;
7481 }
7482 \f
7483 /* Return the alignment in bits of EXP, a pointer valued expression.
7484 But don't return more than MAX_ALIGN no matter what.
7485 The alignment returned is, by default, the alignment of the thing that
7486 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7487
7488 Otherwise, look at the expression to see if we can do better, i.e., if the
7489 expression is actually pointing at an object whose alignment is tighter. */
7490
7491 static int
7492 get_pointer_alignment (exp, max_align)
7493 tree exp;
7494 unsigned max_align;
7495 {
7496 unsigned align, inner;
7497
7498 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7499 return 0;
7500
7501 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7502 align = MIN (align, max_align);
7503
7504 while (1)
7505 {
7506 switch (TREE_CODE (exp))
7507 {
7508 case NOP_EXPR:
7509 case CONVERT_EXPR:
7510 case NON_LVALUE_EXPR:
7511 exp = TREE_OPERAND (exp, 0);
7512 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7513 return align;
7514 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7515 align = MIN (inner, max_align);
7516 break;
7517
7518 case PLUS_EXPR:
7519 /* If sum of pointer + int, restrict our maximum alignment to that
7520 imposed by the integer. If not, we can't do any better than
7521 ALIGN. */
7522 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7523 return align;
7524
7525 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7526 & (max_align - 1))
7527 != 0)
7528 max_align >>= 1;
7529
7530 exp = TREE_OPERAND (exp, 0);
7531 break;
7532
7533 case ADDR_EXPR:
7534 /* See what we are pointing at and look at its alignment. */
7535 exp = TREE_OPERAND (exp, 0);
7536 if (TREE_CODE (exp) == FUNCTION_DECL)
7537 align = FUNCTION_BOUNDARY;
7538 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7539 align = DECL_ALIGN (exp);
7540 #ifdef CONSTANT_ALIGNMENT
7541 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7542 align = CONSTANT_ALIGNMENT (exp, align);
7543 #endif
7544 return MIN (align, max_align);
7545
7546 default:
7547 return align;
7548 }
7549 }
7550 }
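/* Editorial example: for the C expression `&buf', where BUF is a
   declared object, the ADDR_EXPR case above yields
   MIN (DECL_ALIGN (buf), MAX_ALIGN), which may be tighter than the
   alignment implied by the pointer's type; for `p + 4' the PLUS_EXPR
   case caps the result at the 32-bit alignment implied by the
   constant offset. */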
7551 \f
7552 /* Return the tree node and offset if a given argument corresponds to
7553 a string constant. */
7554
7555 static tree
7556 string_constant (arg, ptr_offset)
7557 tree arg;
7558 tree *ptr_offset;
7559 {
7560 STRIP_NOPS (arg);
7561
7562 if (TREE_CODE (arg) == ADDR_EXPR
7563 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7564 {
7565 *ptr_offset = integer_zero_node;
7566 return TREE_OPERAND (arg, 0);
7567 }
7568 else if (TREE_CODE (arg) == PLUS_EXPR)
7569 {
7570 tree arg0 = TREE_OPERAND (arg, 0);
7571 tree arg1 = TREE_OPERAND (arg, 1);
7572
7573 STRIP_NOPS (arg0);
7574 STRIP_NOPS (arg1);
7575
7576 if (TREE_CODE (arg0) == ADDR_EXPR
7577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7578 {
7579 *ptr_offset = arg1;
7580 return TREE_OPERAND (arg0, 0);
7581 }
7582 else if (TREE_CODE (arg1) == ADDR_EXPR
7583 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7584 {
7585 *ptr_offset = arg0;
7586 return TREE_OPERAND (arg1, 0);
7587 }
7588 }
7589
7590 return 0;
7591 }
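/* Editorial example: for the argument of strlen ("foobar" + 2), a
   PLUS_EXPR of an ADDR_EXPR of a STRING_CST and the constant 2, this
   returns the STRING_CST for "foobar" and sets *PTR_OFFSET to the
   tree for 2. */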
7592
7593 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7594 answer, because the string could contain a zero byte in the middle.
7595 TREE_STRING_LENGTH is the size of the character array, not the string.
7596
7597 Unfortunately, string_constant can't access the values of const char
7598 arrays with initializers, so neither can we. */
7599
7600 static tree
7601 c_strlen (src)
7602 tree src;
7603 {
7604 tree offset_node;
7605 int offset, max;
7606 char *ptr;
7607
7608 src = string_constant (src, &offset_node);
7609 if (src == 0)
7610 return 0;
7611 max = TREE_STRING_LENGTH (src);
7612 ptr = TREE_STRING_POINTER (src);
7613 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7614 {
7615 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7616 compute the offset to the following null if we don't know where to
7617 start searching for it. */
7618 int i;
7619 for (i = 0; i < max; i++)
7620 if (ptr[i] == 0)
7621 return 0;
7622 /* We don't know the starting offset, but we do know that the string
7623 has no internal zero bytes. We can assume that the offset falls
7624 within the bounds of the string; otherwise, the programmer deserves
7625 what he gets. Subtract the offset from the length of the string,
7626 and return that. */
7627 /* This would perhaps not be valid if we were dealing with named
7628 arrays in addition to literal string constants. */
7629 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7630 }
7631
7632 /* We have a known offset into the string. Start searching there for
7633 a null character. */
7634 if (offset_node == 0)
7635 offset = 0;
7636 else
7637 {
7638 /* Did we get a long long offset? If so, punt. */
7639 if (TREE_INT_CST_HIGH (offset_node) != 0)
7640 return 0;
7641 offset = TREE_INT_CST_LOW (offset_node);
7642 }
7643 /* If the offset is known to be out of bounds, warn, and call strlen at
7644 runtime. */
7645 if (offset < 0 || offset > max)
7646 {
7647 warning ("offset outside bounds of constant string");
7648 return 0;
7649 }
7650 /* Use strlen to search for the first zero byte. Since any strings
7651 constructed with build_string will have nulls appended, we win even
7652 if we get handed something like (char[4])"abcd".
7653
7654 Since OFFSET is our starting index into the string, no further
7655 calculation is needed. */
7656 return size_int (strlen (ptr + offset));
7657 }
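/* Editorial example: c_strlen of the expression "hello" + 1 finds the
   STRING_CST, sees the constant offset 1, and returns
   size_int (strlen ("hello" + 1)), i.e. 4. A non-constant offset into
   a string with no internal zero bytes takes the symbolic path above
   instead. */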
7658
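/* Return an rtx for the frame address (BUILT_IN_FRAME_ADDRESS) or the
   saved return address (BUILT_IN_RETURN_ADDRESS) of the frame COUNT
   levels up from the current one, according to FNDECL_CODE. TEM is
   the address of the innermost frame, normally
   hard_frame_pointer_rtx. (Editorial summary of the code below.) */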
7659 rtx
7660 expand_builtin_return_addr (fndecl_code, count, tem)
7661 enum built_in_function fndecl_code;
7662 int count;
7663 rtx tem;
7664 {
7665 int i;
7666
7667 /* Some machines need special handling before we can access
7668 arbitrary frames. For example, on the sparc, we must first flush
7669 all register windows to the stack. */
7670 #ifdef SETUP_FRAME_ADDRESSES
7671 SETUP_FRAME_ADDRESSES ();
7672 #endif
7673
7674 /* On the sparc, the return address is not in the frame, it is in a
7675 register. There is no way to access it off of the current frame
7676 pointer, but it can be accessed off the previous frame pointer by
7677 reading the value from the register window save area. */
7678 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7679 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7680 count--;
7681 #endif
7682
7683 /* Scan back COUNT frames to the specified frame. */
7684 for (i = 0; i < count; i++)
7685 {
7686 /* Assume the dynamic chain pointer is in the word that the
7687 frame address points to, unless otherwise specified. */
7688 #ifdef DYNAMIC_CHAIN_ADDRESS
7689 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7690 #endif
7691 tem = memory_address (Pmode, tem);
7692 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7693 }
7694
7695 /* For __builtin_frame_address, return what we've got. */
7696 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7697 return tem;
7698
7699 /* For __builtin_return_address, get the return address from that
7700 frame. */
7701 #ifdef RETURN_ADDR_RTX
7702 tem = RETURN_ADDR_RTX (count, tem);
7703 #else
7704 tem = memory_address (Pmode,
7705 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7706 tem = gen_rtx (MEM, Pmode, tem);
7707 #endif
7708 return tem;
7709 }
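/* Editorial usage sketch: in user code,

       void *f = __builtin_frame_address (0);
       void *r = __builtin_return_address (1);

   the first call returns TEM unchanged (zero loop iterations above),
   while the second follows one link of the dynamic chain and then
   loads the return address saved in that frame. */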
7710 \f
7711 /* Expand an expression EXP that calls a built-in function,
7712 with result going to TARGET if that's convenient
7713 (and in mode MODE if that's convenient).
7714 SUBTARGET may be used as the target for computing one of EXP's operands.
7715 IGNORE is nonzero if the value is to be ignored. */
7716
7717 #define CALLED_AS_BUILT_IN(NODE) \
7718 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7719
7720 static rtx
7721 expand_builtin (exp, target, subtarget, mode, ignore)
7722 tree exp;
7723 rtx target;
7724 rtx subtarget;
7725 enum machine_mode mode;
7726 int ignore;
7727 {
7728 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7729 tree arglist = TREE_OPERAND (exp, 1);
7730 rtx op0;
7731 rtx lab1, insns;
7732 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7733 optab builtin_optab;
7734
7735 switch (DECL_FUNCTION_CODE (fndecl))
7736 {
7737 case BUILT_IN_ABS:
7738 case BUILT_IN_LABS:
7739 case BUILT_IN_FABS:
7740 /* build_function_call changes these into ABS_EXPR. */
7741 abort ();
7742
7743 case BUILT_IN_SIN:
7744 case BUILT_IN_COS:
7745 /* Treat these like sqrt, but only if the user asks for them. */
7746 if (! flag_fast_math)
7747 break;
7748 case BUILT_IN_FSQRT:
7749 /* If not optimizing, call the library function. */
7750 if (! optimize)
7751 break;
7752
7753 if (arglist == 0
7754 /* Arg could be wrong type if user redeclared this fcn wrong. */
7755 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7756 break;
7757
7758 /* Stabilize and compute the argument. */
7759 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7760 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7761 {
7762 exp = copy_node (exp);
7763 arglist = copy_node (arglist);
7764 TREE_OPERAND (exp, 1) = arglist;
7765 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7766 }
7767 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7768
7769 /* Make a suitable register to place result in. */
7770 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7771
7772 emit_queue ();
7773 start_sequence ();
7774
7775 switch (DECL_FUNCTION_CODE (fndecl))
7776 {
7777 case BUILT_IN_SIN:
7778 builtin_optab = sin_optab; break;
7779 case BUILT_IN_COS:
7780 builtin_optab = cos_optab; break;
7781 case BUILT_IN_FSQRT:
7782 builtin_optab = sqrt_optab; break;
7783 default:
7784 abort ();
7785 }
7786
7787 /* Compute into TARGET.
7788 Set TARGET to wherever the result comes back. */
7789 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7790 builtin_optab, op0, target, 0);
7791
7792 /* If we were unable to expand via the builtin, stop the
7793 sequence (without outputting the insns) and break, causing
7794 a call to the library function. */
7795 if (target == 0)
7796 {
7797 end_sequence ();
7798 break;
7799 }
7800
7801 /* Check the results by default. But if flag_fast_math is turned on,
7802 then assume sqrt will always be called with valid arguments. */
7803
7804 if (! flag_fast_math)
7805 {
7806 /* Don't define the builtin FP instructions
7807 if your machine is not IEEE. */
7808 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7809 abort ();
7810
7811 lab1 = gen_label_rtx ();
7812
7813 /* Test the result; if it is NaN, set errno=EDOM because
7814 the argument was not in the domain. */
7815 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7816 emit_jump_insn (gen_beq (lab1));
7817
7818 #ifdef TARGET_EDOM
7819 {
7820 #ifdef GEN_ERRNO_RTX
7821 rtx errno_rtx = GEN_ERRNO_RTX;
7822 #else
7823 rtx errno_rtx
7824 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7825 #endif
7826
7827 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7828 }
7829 #else
7830 /* We can't set errno=EDOM directly; let the library call do it.
7831 Pop the arguments right away in case the call gets deleted. */
7832 NO_DEFER_POP;
7833 expand_call (exp, target, 0);
7834 OK_DEFER_POP;
7835 #endif
7836
7837 emit_label (lab1);
7838 }
7839
7840 /* Output the entire sequence. */
7841 insns = get_insns ();
7842 end_sequence ();
7843 emit_insns (insns);
7844
7845 return target;
7846
7847 /* __builtin_apply_args returns block of memory allocated on
7848 the stack into which is stored the arg pointer, structure
7849 value address, static chain, and all the registers that might
7850 possibly be used in performing a function call. The code is
7851 moved to the start of the function so the incoming values are
7852 saved. */
7853 case BUILT_IN_APPLY_ARGS:
7854 /* Don't do __builtin_apply_args more than once in a function.
7855 Save the result of the first call and reuse it. */
7856 if (apply_args_value != 0)
7857 return apply_args_value;
7858 {
7859 /* When this function is called, it means that registers must be
7860 saved on entry to this function. So we migrate the
7861 call to the first insn of this function. */
7862 rtx temp;
7863 rtx seq;
7864
7865 start_sequence ();
7866 temp = expand_builtin_apply_args ();
7867 seq = get_insns ();
7868 end_sequence ();
7869
7870 apply_args_value = temp;
7871
7872 /* Put the sequence after the NOTE that starts the function.
7873 If this is inside a SEQUENCE, make the outer-level insn
7874 chain current, so the code is placed at the start of the
7875 function. */
7876 push_topmost_sequence ();
7877 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7878 pop_topmost_sequence ();
7879 return temp;
7880 }
7881
7882 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7883 FUNCTION with a copy of the parameters described by
7884 ARGUMENTS, and ARGSIZE. It returns a block of memory
7885 allocated on the stack into which is stored all the registers
7886 that might possibly be used for returning the result of a
7887 function. ARGUMENTS is the value returned by
7888 __builtin_apply_args. ARGSIZE is the number of bytes of
7889 arguments that must be copied. ??? How should this value be
7890 computed? We'll also need a safe worst case value for varargs
7891 functions. */
7892 case BUILT_IN_APPLY:
7893 if (arglist == 0
7894 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7895 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7896 || TREE_CHAIN (arglist) == 0
7897 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7898 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7899 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7900 return const0_rtx;
7901 else
7902 {
7903 int i;
7904 tree t;
7905 rtx ops[3];
7906
7907 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7908 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7909
7910 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7911 }
7912
7913 /* __builtin_return (RESULT) causes the function to return the
7914 value described by RESULT. RESULT is the address of the block of
7915 memory returned by __builtin_apply. */
7916 case BUILT_IN_RETURN:
7917 if (arglist
7918 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7919 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7920 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7921 NULL_RTX, VOIDmode, 0));
7922 return const0_rtx;
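/* Editorial usage sketch for the three builtins above, e.g. in a
   generic forwarding function (the argument-size constant 64 is a
   made-up worst case):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) real_fn, args, 64);
       __builtin_return (result); */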
7923
7924 case BUILT_IN_SAVEREGS:
7925 /* Don't do __builtin_saveregs more than once in a function.
7926 Save the result of the first call and reuse it. */
7927 if (saveregs_value != 0)
7928 return saveregs_value;
7929 {
7930 /* When this function is called, it means that registers must be
7931 saved on entry to this function. So we migrate the
7932 call to the first insn of this function. */
7933 rtx temp;
7934 rtx seq;
7935
7936 /* Now really call the function. `expand_call' does not call
7937 expand_builtin, so there is no danger of infinite recursion here. */
7938 start_sequence ();
7939
7940 #ifdef EXPAND_BUILTIN_SAVEREGS
7941 /* Do whatever the machine needs done in this case. */
7942 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7943 #else
7944 /* The register where the function returns its value
7945 is likely to have something else in it, such as an argument.
7946 So preserve that register around the call. */
7947
7948 if (value_mode != VOIDmode)
7949 {
7950 rtx valreg = hard_libcall_value (value_mode);
7951 rtx saved_valreg = gen_reg_rtx (value_mode);
7952
7953 emit_move_insn (saved_valreg, valreg);
7954 temp = expand_call (exp, target, ignore);
7955 emit_move_insn (valreg, saved_valreg);
7956 }
7957 else
7958 /* Generate the call, putting the value in a pseudo. */
7959 temp = expand_call (exp, target, ignore);
7960 #endif
7961
7962 seq = get_insns ();
7963 end_sequence ();
7964
7965 saveregs_value = temp;
7966
7967 /* Put the sequence after the NOTE that starts the function.
7968 If this is inside a SEQUENCE, make the outer-level insn
7969 chain current, so the code is placed at the start of the
7970 function. */
7971 push_topmost_sequence ();
7972 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7973 pop_topmost_sequence ();
7974 return temp;
7975 }
7976
7977 /* __builtin_args_info (N) returns word N of the arg space info
7978 for the current function. The number and meanings of words
7979 are controlled by the definition of CUMULATIVE_ARGS. */
7980 case BUILT_IN_ARGS_INFO:
7981 {
7982 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7983 int i;
7984 int *word_ptr = (int *) &current_function_args_info;
7985 tree type, elts, result;
7986
7987 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7988 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7989 __FILE__, __LINE__);
7990
7991 if (arglist != 0)
7992 {
7993 tree arg = TREE_VALUE (arglist);
7994 if (TREE_CODE (arg) != INTEGER_CST)
7995 error ("argument of `__builtin_args_info' must be constant");
7996 else
7997 {
7998 int wordnum = TREE_INT_CST_LOW (arg);
7999
8000 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8001 error ("argument of `__builtin_args_info' out of range");
8002 else
8003 return GEN_INT (word_ptr[wordnum]);
8004 }
8005 }
8006 else
8007 error ("missing argument in `__builtin_args_info'");
8008
8009 return const0_rtx;
8010
8011 #if 0
8012 for (i = 0; i < nwords; i++)
8013 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8014
8015 type = build_array_type (integer_type_node,
8016 build_index_type (build_int_2 (nwords, 0)));
8017 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8018 TREE_CONSTANT (result) = 1;
8019 TREE_STATIC (result) = 1;
8020 result = build (INDIRECT_REF, build_pointer_type (type), result);
8021 TREE_CONSTANT (result) = 1;
8022 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8023 #endif
8024 }
8025
8026 /* Return the address of the first anonymous stack arg. */
8027 case BUILT_IN_NEXT_ARG:
8028 {
8029 tree fntype = TREE_TYPE (current_function_decl);
8030
8031 if ((TYPE_ARG_TYPES (fntype) == 0
8032 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8033 == void_type_node))
8034 && ! current_function_varargs)
8035 {
8036 error ("`va_start' used in function with fixed args");
8037 return const0_rtx;
8038 }
8039
8040 if (arglist)
8041 {
8042 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8043 tree arg = TREE_VALUE (arglist);
8044
8045 /* Strip off all nops for the sake of the comparison. This
8046 is not quite the same as STRIP_NOPS. It does more.
8047 We must also strip off INDIRECT_REF for C++ reference
8048 parameters. */
8049 while (TREE_CODE (arg) == NOP_EXPR
8050 || TREE_CODE (arg) == CONVERT_EXPR
8051 || TREE_CODE (arg) == NON_LVALUE_EXPR
8052 || TREE_CODE (arg) == INDIRECT_REF)
8053 arg = TREE_OPERAND (arg, 0);
8054 if (arg != last_parm)
8055 warning ("second parameter of `va_start' not last named argument");
8056 }
8057 else if (! current_function_varargs)
8058 /* Evidently an out of date version of <stdarg.h>; can't validate
8059 va_start's second argument, but can still work as intended. */
8060 warning ("`__builtin_next_arg' called without an argument");
8061 }
8062
8063 return expand_binop (Pmode, add_optab,
8064 current_function_internal_arg_pointer,
8065 current_function_arg_offset_rtx,
8066 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8067
8068 case BUILT_IN_CLASSIFY_TYPE:
8069 if (arglist != 0)
8070 {
8071 tree type = TREE_TYPE (TREE_VALUE (arglist));
8072 enum tree_code code = TREE_CODE (type);
8073 if (code == VOID_TYPE)
8074 return GEN_INT (void_type_class);
8075 if (code == INTEGER_TYPE)
8076 return GEN_INT (integer_type_class);
8077 if (code == CHAR_TYPE)
8078 return GEN_INT (char_type_class);
8079 if (code == ENUMERAL_TYPE)
8080 return GEN_INT (enumeral_type_class);
8081 if (code == BOOLEAN_TYPE)
8082 return GEN_INT (boolean_type_class);
8083 if (code == POINTER_TYPE)
8084 return GEN_INT (pointer_type_class);
8085 if (code == REFERENCE_TYPE)
8086 return GEN_INT (reference_type_class);
8087 if (code == OFFSET_TYPE)
8088 return GEN_INT (offset_type_class);
8089 if (code == REAL_TYPE)
8090 return GEN_INT (real_type_class);
8091 if (code == COMPLEX_TYPE)
8092 return GEN_INT (complex_type_class);
8093 if (code == FUNCTION_TYPE)
8094 return GEN_INT (function_type_class);
8095 if (code == METHOD_TYPE)
8096 return GEN_INT (method_type_class);
8097 if (code == RECORD_TYPE)
8098 return GEN_INT (record_type_class);
8099 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8100 return GEN_INT (union_type_class);
8101 if (code == ARRAY_TYPE)
8102 {
8103 if (TYPE_STRING_FLAG (type))
8104 return GEN_INT (string_type_class);
8105 else
8106 return GEN_INT (array_type_class);
8107 }
8108 if (code == SET_TYPE)
8109 return GEN_INT (set_type_class);
8110 if (code == FILE_TYPE)
8111 return GEN_INT (file_type_class);
8112 if (code == LANG_TYPE)
8113 return GEN_INT (lang_type_class);
8114 }
8115 return GEN_INT (no_type_class);
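/* Editorial example: __builtin_classify_type (1.5) expands to
   GEN_INT (real_type_class), and __builtin_classify_type (buf), for
   an array BUF that decays to a pointer argument, to
   GEN_INT (pointer_type_class). */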
8116
8117 case BUILT_IN_CONSTANT_P:
8118 if (arglist == 0)
8119 return const0_rtx;
8120 else
8121 {
8122 tree arg = TREE_VALUE (arglist);
8123
8124 STRIP_NOPS (arg);
8125 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8126 || (TREE_CODE (arg) == ADDR_EXPR
8127 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8128 ? const1_rtx : const0_rtx);
8129 }
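/* Editorial example: __builtin_constant_p (3) and
   __builtin_constant_p ("abc") both expand to const1_rtx here, while
   __builtin_constant_p (n) for a variable N expands to const0_rtx,
   since no folding is attempted at this point. */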
8130
8131 case BUILT_IN_FRAME_ADDRESS:
8132 /* The argument must be a nonnegative integer constant.
8133 It counts the number of frames to scan up the stack.
8134 The value is the address of that frame. */
8135 case BUILT_IN_RETURN_ADDRESS:
8136 /* The argument must be a nonnegative integer constant.
8137 It counts the number of frames to scan up the stack.
8138 The value is the return address saved in that frame. */
8139 if (arglist == 0)
8140 /* Warning about missing arg was already issued. */
8141 return const0_rtx;
8142 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8143 {
8144 error ("invalid arg to `__builtin_return_address'");
8145 return const0_rtx;
8146 }
8147 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8148 {
8149 error ("invalid arg to `__builtin_return_address'");
8150 return const0_rtx;
8151 }
8152 else
8153 {
8154 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8155 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8156 hard_frame_pointer_rtx);
8157
8158 /* For __builtin_frame_address, return what we've got. */
8159 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8160 return tem;
8161
8162 if (GET_CODE (tem) != REG)
8163 tem = copy_to_reg (tem);
8164 return tem;
8165 }
8166
8167 case BUILT_IN_ALLOCA:
8168 if (arglist == 0
8169 /* Arg could be non-integer if user redeclared this fcn wrong. */
8170 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8171 break;
8172
8173 /* Compute the argument. */
8174 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8175
8176 /* Allocate the desired space. */
8177 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8178
8179 case BUILT_IN_FFS:
8180 /* If not optimizing, call the library function. */
8181 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8182 break;
8183
8184 if (arglist == 0
8185 /* Arg could be non-integer if user redeclared this fcn wrong. */
8186 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8187 break;
8188
8189 /* Compute the argument. */
8190 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8191 /* Compute ffs, into TARGET if possible.
8192 Set TARGET to wherever the result comes back. */
8193 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8194 ffs_optab, op0, target, 1);
8195 if (target == 0)
8196 abort ();
8197 return target;
8198
8199 case BUILT_IN_STRLEN:
8200 /* If not optimizing, call the library function. */
8201 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8202 break;
8203
8204 if (arglist == 0
8205 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8206 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8207 break;
8208 else
8209 {
8210 tree src = TREE_VALUE (arglist);
8211 tree len = c_strlen (src);
8212
8213 int align
8214 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8215
8216 rtx result, src_rtx, char_rtx;
8217 enum machine_mode insn_mode = value_mode, char_mode;
8218 enum insn_code icode;
8219
8220 /* If the length is known, just return it. */
8221 if (len != 0)
8222 return expand_expr (len, target, mode, 0);
8223
8224 /* If SRC is not a pointer type, don't do this operation inline. */
8225 if (align == 0)
8226 break;
8227
8228 /* Call a function if we can't compute strlen in the right mode. */
8229
8230 while (insn_mode != VOIDmode)
8231 {
8232 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8233 if (icode != CODE_FOR_nothing)
8234 break;
8235
8236 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8237 }
8238 if (insn_mode == VOIDmode)
8239 break;
8240
8241 /* Make a place to write the result of the instruction. */
8242 result = target;
8243 if (! (result != 0
8244 && GET_CODE (result) == REG
8245 && GET_MODE (result) == insn_mode
8246 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8247 result = gen_reg_rtx (insn_mode);
8248
8249 /* Make sure the operands are acceptable to the predicates. */
8250
8251 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8252 result = gen_reg_rtx (insn_mode);
8253
8254 src_rtx = memory_address (BLKmode,
8255 expand_expr (src, NULL_RTX, ptr_mode,
8256 EXPAND_NORMAL));
8257 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8258 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8259
8260 char_rtx = const0_rtx;
8261 char_mode = insn_operand_mode[(int)icode][2];
8262 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8263 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8264
8265 emit_insn (GEN_FCN (icode) (result,
8266 gen_rtx (MEM, BLKmode, src_rtx),
8267 char_rtx, GEN_INT (align)));
8268
8269 /* Return the value in the proper mode for this function. */
8270 if (GET_MODE (result) == value_mode)
8271 return result;
8272 else if (target != 0)
8273 {
8274 convert_move (target, result, 0);
8275 return target;
8276 }
8277 else
8278 return convert_to_mode (value_mode, result, 0);
8279 }
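/* Editorial example: strlen ("hello") takes the LEN != 0 path above
   and folds to the constant 5 at compile time; a non-constant
   argument instead uses the target's strlen_optab pattern when one
   exists, or falls back to the library call. */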
8280
8281 case BUILT_IN_STRCPY:
8282 /* If not optimizing, call the library function. */
8283 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8284 break;
8285
8286 if (arglist == 0
8287 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8288 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8289 || TREE_CHAIN (arglist) == 0
8290 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8291 break;
8292 else
8293 {
8294 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8295
8296 if (len == 0)
8297 break;
8298
8299 len = size_binop (PLUS_EXPR, len, integer_one_node);
8300
8301 chainon (arglist, build_tree_list (NULL_TREE, len));
8302 }
8303
8304 /* Falls through. */
8305 case BUILT_IN_MEMCPY:
8306 /* If not optimizing, call the library function. */
8307 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8308 break;
8309
8310 if (arglist == 0
8311 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8312 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8313 || TREE_CHAIN (arglist) == 0
8314 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8315 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8316 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8317 break;
8318 else
8319 {
8320 tree dest = TREE_VALUE (arglist);
8321 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8322 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8323 tree type;
8324
8325 int src_align
8326 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8327 int dest_align
8328 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8329 rtx dest_rtx, dest_mem, src_mem;
8330
8331 /* If either SRC or DEST is not a pointer type, don't do
8332 this operation in-line. */
8333 if (src_align == 0 || dest_align == 0)
8334 {
8335 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8336 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8337 break;
8338 }
8339
8340 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8341 dest_mem = gen_rtx (MEM, BLKmode,
8342 memory_address (BLKmode, dest_rtx));
8343 /* There could be a void* cast on top of the object. */
8344 while (TREE_CODE (dest) == NOP_EXPR)
8345 dest = TREE_OPERAND (dest, 0);
8346 type = TREE_TYPE (TREE_TYPE (dest));
8347 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8348 src_mem = gen_rtx (MEM, BLKmode,
8349 memory_address (BLKmode,
8350 expand_expr (src, NULL_RTX,
8351 ptr_mode,
8352 EXPAND_SUM)));
8353 /* There could be a void* cast on top of the object. */
8354 while (TREE_CODE (src) == NOP_EXPR)
8355 src = TREE_OPERAND (src, 0);
8356 type = TREE_TYPE (TREE_TYPE (src));
8357 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8358
8359 /* Copy word part most expediently. */
8360 emit_block_move (dest_mem, src_mem,
8361 expand_expr (len, NULL_RTX, VOIDmode, 0),
8362 MIN (src_align, dest_align));
8363 return force_operand (dest_rtx, NULL_RTX);
8364 }
8365
8366 /* These comparison functions need an instruction that returns an actual
8367 index. An ordinary compare that just sets the condition codes
8368 is not enough. */
8369 #ifdef HAVE_cmpstrsi
8370 case BUILT_IN_STRCMP:
8371 /* If not optimizing, call the library function. */
8372 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8373 break;
8374
8375 if (arglist == 0
8376 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8377 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8378 || TREE_CHAIN (arglist) == 0
8379 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8380 break;
8381 else if (!HAVE_cmpstrsi)
8382 break;
8383 {
8384 tree arg1 = TREE_VALUE (arglist);
8385 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8386 tree offset;
8387 tree len, len2;
8388
8389 len = c_strlen (arg1);
8390 if (len)
8391 len = size_binop (PLUS_EXPR, integer_one_node, len);
8392 len2 = c_strlen (arg2);
8393 if (len2)
8394 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8395
8396 /* If we don't have a constant length for the first, use the length
8397 of the second, if we know it. We don't require a constant for
8398 this case; some cost analysis could be done if both are available
8399 but neither is constant. For now, assume they're equally cheap.
8400
8401 If both strings have constant lengths, use the smaller. This
8402 could arise if optimization results in strcmp being called with
8403 two fixed strings, or if the code was machine-generated. We should
8404 add some code to the `memcmp' handler below to deal with such
8405 situations, someday. */
8406 if (!len || TREE_CODE (len) != INTEGER_CST)
8407 {
8408 if (len2)
8409 len = len2;
8410 else if (len == 0)
8411 break;
8412 }
8413 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8414 {
8415 if (tree_int_cst_lt (len2, len))
8416 len = len2;
8417 }
8418
8419 chainon (arglist, build_tree_list (NULL_TREE, len));
8420 }
8421
8422 /* Falls through. */
8423 case BUILT_IN_MEMCMP:
8424 /* If not optimizing, call the library function. */
8425 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8426 break;
8427
8428 if (arglist == 0
8429 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8430 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8431 || TREE_CHAIN (arglist) == 0
8432 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8433 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8434 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8435 break;
8436 else if (!HAVE_cmpstrsi)
8437 break;
8438 {
8439 tree arg1 = TREE_VALUE (arglist);
8440 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8441 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8442 rtx result;
8443
8444 int arg1_align
8445 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8446 int arg2_align
8447 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8448 enum machine_mode insn_mode
8449 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8450
8451 /* If we don't have POINTER_TYPE, call the function. */
8452 if (arg1_align == 0 || arg2_align == 0)
8453 {
8454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8455 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8456 break;
8457 }
8458
8459 /* Make a place to write the result of the instruction. */
8460 result = target;
8461 if (! (result != 0
8462 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8463 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8464 result = gen_reg_rtx (insn_mode);
8465
8466 emit_insn (gen_cmpstrsi (result,
8467 gen_rtx (MEM, BLKmode,
8468 expand_expr (arg1, NULL_RTX,
8469 ptr_mode,
8470 EXPAND_NORMAL)),
8471 gen_rtx (MEM, BLKmode,
8472 expand_expr (arg2, NULL_RTX,
8473 ptr_mode,
8474 EXPAND_NORMAL)),
8475 expand_expr (len, NULL_RTX, VOIDmode, 0),
8476 GEN_INT (MIN (arg1_align, arg2_align))));
8477
8478 /* Return the value in the proper mode for this function. */
8479 mode = TYPE_MODE (TREE_TYPE (exp));
8480 if (GET_MODE (result) == mode)
8481 return result;
8482 else if (target != 0)
8483 {
8484 convert_move (target, result, 0);
8485 return target;
8486 }
8487 else
8488 return convert_to_mode (mode, result, 0);
8489 }
8490 #else
8491 case BUILT_IN_STRCMP:
8492 case BUILT_IN_MEMCMP:
8493 break;
8494 #endif
8495
8496 /* __builtin_setjmp is passed a pointer to an array of five words
8497 (not all will be used on all machines). It operates similarly to
8498 the C library function of the same name, but is more efficient.
8499 Much of the code below (and for longjmp) is copied from the handling
8500 of non-local gotos.
8501
8502 NOTE: This is intended for use by GNAT and will only work in
8503 the manner in which GNAT uses it. This code will likely NOT
8504 survive to the GCC 2.8.0 release. */
8505 case BUILT_IN_SETJMP:
8506 if (arglist == 0
8507 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8508 break;
8509
8510 {
8511 rtx buf_addr
8512 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), subtarget,
8513 VOIDmode, 0));
8514 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8515 enum machine_mode sa_mode = Pmode;
8516 rtx stack_save;
8517
8518 if (target == 0 || GET_CODE (target) != REG
8519 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8520 target = gen_reg_rtx (value_mode);
8521
8522 emit_queue ();
8523
8524 emit_note (NULL_PTR, NOTE_INSN_SETJMP);
8525 current_function_calls_setjmp = 1;
8526
8527 /* We store the frame pointer and the address of lab1 in the buffer
8528 and use the rest of it for the stack save area, which is
8529 machine-dependent. */
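/* Editorial sketch of the five-word buffer layout used below (offsets
   in units of GET_MODE_SIZE (Pmode)):

       buf[0]   frame pointer (virtual_stack_vars_rtx)
       buf[1]   resume label (LAB1)
       buf[2..] machine-dependent stack save area */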
8530 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8531 virtual_stack_vars_rtx);
8532 emit_move_insn
8533 (validize_mem (gen_rtx (MEM, Pmode,
8534 plus_constant (buf_addr,
8535 GET_MODE_SIZE (Pmode)))),
8536 gen_rtx (LABEL_REF, Pmode, lab1));
8537
8538 #ifdef HAVE_save_stack_nonlocal
8539 if (HAVE_save_stack_nonlocal)
8540 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8541 #endif
8542
8543 stack_save = gen_rtx (MEM, sa_mode,
8544 plus_constant (buf_addr,
8545 2 * GET_MODE_SIZE (Pmode)));
8546 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8547
8548 /* Set TARGET to zero and branch around the other case. */
8549 emit_move_insn (target, const0_rtx);
8550 emit_jump_insn (gen_jump (lab2));
8551 emit_barrier ();
8552 emit_label (lab1);
8553
8554 /* Now put in the code to restore the frame pointer, and argument
8555 pointer, if needed. The code below is from expand_end_bindings
8556 in stmt.c; see detailed documentation there. */
8557 #ifdef HAVE_nonlocal_goto
8558 if (! HAVE_nonlocal_goto)
8559 #endif
8560 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8561
8562 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8563 if (fixed_regs[ARG_POINTER_REGNUM])
8564 {
8565 #ifdef ELIMINABLE_REGS
8566 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8567 int i;
8568
8569 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8570 if (elim_regs[i].from == ARG_POINTER_REGNUM
8571 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8572 break;
8573
8574 if (i == sizeof elim_regs / sizeof elim_regs [0])
8575 #endif
8576 {
8577 /* Now restore our arg pointer from the address at which it
8578 was saved in our stack frame.
8579 If space hasn't been allocated for it yet, make
8580 some now. */
8581 if (arg_pointer_save_area == 0)
8582 arg_pointer_save_area
8583 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8584 emit_move_insn (virtual_incoming_args_rtx,
8585 copy_to_reg (arg_pointer_save_area));
8586 }
8587 }
8588 #endif
8589
8590 /* The result to return is in the static chain pointer. */
8591 if (GET_MODE (static_chain_rtx) == GET_MODE (target))
8592 emit_move_insn (target, static_chain_rtx);
8593 else
8594 convert_move (target, static_chain_rtx, 0);
8595
8596 emit_label (lab2);
8597 return target;
8598 }
8599
8600 /* __builtin_longjmp is passed a pointer to an array of five words
8601 and a value to return. It's similar to the C library longjmp
8602 function but works with __builtin_setjmp above. */
8603 case BUILT_IN_LONGJMP:
8604 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8605 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8606 break;
8607
8608 {
8609 rtx buf_addr
8610 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), NULL_RTX,
8611 VOIDmode, 0));
8612 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8613 rtx lab = gen_rtx (MEM, Pmode,
8614 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8615 enum machine_mode sa_mode
8616 #ifdef HAVE_save_stack_nonlocal
8617 = (HAVE_save_stack_nonlocal
8618 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8619 : Pmode);
8620 #else
8621 = Pmode;
8622 #endif
8623 rtx stack = gen_rtx (MEM, sa_mode,
8624 plus_constant (buf_addr,
8625 2 * GET_MODE_SIZE (Pmode)));
8626 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), NULL_RTX,
8627 VOIDmode, 0);
8628
8629 /* Pick up FP, label, and SP from the block and jump. This code is
8630 from expand_goto in stmt.c; see there for detailed comments. */
8631 #if HAVE_nonlocal_goto
8632 if (HAVE_nonlocal_goto)
8633 emit_insn (gen_nonlocal_goto (fp, lab, stack, value));
8634 else
8635 #endif
8636 {
8637 emit_move_insn (hard_frame_pointer_rtx, fp);
8638 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8639
8640 /* Put the return value in the static chain register. */
8641 emit_move_insn (static_chain_rtx, value);
8642 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8643 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8644 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8645 emit_indirect_jump (copy_to_reg (lab));
8646 }
8647
8648 return const0_rtx;
8649 }
8650
8651 default: /* just do library call, if unknown builtin */
8652 error ("built-in function `%s' not currently supported",
8653 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8654 }
8655
8656 /* The switch statement above can drop through to cause the function
8657 to be called normally. */
8658
8659 return expand_call (exp, target, ignore);
8660 }
8661 \f
8662 /* Built-in functions to perform an untyped call and return. */
8663
8664 /* For each register that may be used for calling a function, this
8665 gives a mode used to copy the register's value. VOIDmode indicates
8666 the register is not used for calling a function. If the machine
8667 has register windows, this gives only the outbound registers.
8668 INCOMING_REGNO gives the corresponding inbound register. */
8669 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8670
8671 /* For each register that may be used for returning values, this gives
8672 a mode used to copy the register's value. VOIDmode indicates the
8673 register is not used for returning values. If the machine has
8674 register windows, this gives only the outbound registers.
8675 INCOMING_REGNO gives the corresponding inbound register. */
8676 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8677
8678 /* For each register that may be used for calling a function, this
8679 gives the offset of that register into the block returned by
8680 __builtin_apply_args. 0 indicates that the register is not
8681 used for calling a function. */
8682 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8683
8684 /* Return the offset of register REGNO into the block returned by
8685 __builtin_apply_args. This is not declared static, since it is
8686 needed in objc-act.c. */
8687
8688 int
8689 apply_args_register_offset (regno)
8690 int regno;
8691 {
8692 apply_args_size ();
8693
8694 /* Arguments are always put in outgoing registers (in the argument
8695 block) when that makes sense. */
8696 #ifdef OUTGOING_REGNO
8697 regno = OUTGOING_REGNO(regno);
8698 #endif
8699 return apply_args_reg_offset[regno];
8700 }
8701
8702 /* Return the size required for the block returned by __builtin_apply_args,
8703 and initialize apply_args_mode. */
8704
8705 static int
8706 apply_args_size ()
8707 {
8708 static int size = -1;
8709 int align, regno;
8710 enum machine_mode mode;
8711
8712 /* The values computed by this function never change. */
8713 if (size < 0)
8714 {
8715 /* The first value is the incoming arg-pointer. */
8716 size = GET_MODE_SIZE (Pmode);
8717
8718 /* The second value is the structure value address unless this is
8719 passed as an "invisible" first argument. */
8720 if (struct_value_rtx)
8721 size += GET_MODE_SIZE (Pmode);
8722
8723 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8724 if (FUNCTION_ARG_REGNO_P (regno))
8725 {
8726 /* Search for the proper mode for copying this register's
8727 value. I'm not sure this is right, but it works so far. */
8728 enum machine_mode best_mode = VOIDmode;
8729
8730 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8731 mode != VOIDmode;
8732 mode = GET_MODE_WIDER_MODE (mode))
8733 if (HARD_REGNO_MODE_OK (regno, mode)
8734 && HARD_REGNO_NREGS (regno, mode) == 1)
8735 best_mode = mode;
8736
8737 if (best_mode == VOIDmode)
8738 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8739 mode != VOIDmode;
8740 mode = GET_MODE_WIDER_MODE (mode))
8741 if (HARD_REGNO_MODE_OK (regno, mode)
8742 && (mov_optab->handlers[(int) mode].insn_code
8743 != CODE_FOR_nothing))
8744 best_mode = mode;
8745
8746 mode = best_mode;
8747 if (mode == VOIDmode)
8748 abort ();
8749
8750 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8751 if (size % align != 0)
8752 size = CEIL (size, align) * align;
8753 apply_args_reg_offset[regno] = size;
8754 size += GET_MODE_SIZE (mode);
8755 apply_args_mode[regno] = mode;
8756 }
8757 else
8758 {
8759 apply_args_mode[regno] = VOIDmode;
8760 apply_args_reg_offset[regno] = 0;
8761 }
8762 }
8763 return size;
8764 }
8765
8766 /* Return the size required for the block returned by __builtin_apply,
8767 and initialize apply_result_mode. */
8768
8769 static int
8770 apply_result_size ()
8771 {
8772 static int size = -1;
8773 int align, regno;
8774 enum machine_mode mode;
8775
8776 /* The values computed by this function never change. */
8777 if (size < 0)
8778 {
8779 size = 0;
8780
8781 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8782 if (FUNCTION_VALUE_REGNO_P (regno))
8783 {
8784 /* Search for the proper mode for copying this register's
8785 value. I'm not sure this is right, but it works so far. */
8786 enum machine_mode best_mode = VOIDmode;
8787
8788 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8789 mode != TImode;
8790 mode = GET_MODE_WIDER_MODE (mode))
8791 if (HARD_REGNO_MODE_OK (regno, mode))
8792 best_mode = mode;
8793
8794 if (best_mode == VOIDmode)
8795 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8796 mode != VOIDmode;
8797 mode = GET_MODE_WIDER_MODE (mode))
8798 if (HARD_REGNO_MODE_OK (regno, mode)
8799 && (mov_optab->handlers[(int) mode].insn_code
8800 != CODE_FOR_nothing))
8801 best_mode = mode;
8802
8803 mode = best_mode;
8804 if (mode == VOIDmode)
8805 abort ();
8806
8807 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8808 if (size % align != 0)
8809 size = CEIL (size, align) * align;
8810 size += GET_MODE_SIZE (mode);
8811 apply_result_mode[regno] = mode;
8812 }
8813 else
8814 apply_result_mode[regno] = VOIDmode;
8815
8816 /* Allow targets that use untyped_call and untyped_return to override
8817 the size so that machine-specific information can be stored here. */
8818 #ifdef APPLY_RESULT_SIZE
8819 size = APPLY_RESULT_SIZE;
8820 #endif
8821 }
8822 return size;
8823 }
8824
8825 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8826 /* Create a vector describing the result block RESULT. If SAVEP is true,
8827 the result block is used to save the values; otherwise it is used to
8828 restore the values. */
8829
8830 static rtx
8831 result_vector (savep, result)
8832 int savep;
8833 rtx result;
8834 {
8835 int regno, size, align, nelts;
8836 enum machine_mode mode;
8837 rtx reg, mem;
8838 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8839
8840 size = nelts = 0;
8841 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8842 if ((mode = apply_result_mode[regno]) != VOIDmode)
8843 {
8844 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8845 if (size % align != 0)
8846 size = CEIL (size, align) * align;
8847 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8848 mem = change_address (result, mode,
8849 plus_constant (XEXP (result, 0), size));
8850 savevec[nelts++] = (savep
8851 ? gen_rtx (SET, VOIDmode, mem, reg)
8852 : gen_rtx (SET, VOIDmode, reg, mem));
8853 size += GET_MODE_SIZE (mode);
8854 }
8855 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8856 }
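/* The vector built above is a PARALLEL of SETs.  As an illustrative
   sketch (hypothetical registers and offsets), for return registers
   r0 (SImode) and f0 (DFmode) with SAVEP nonzero it looks roughly like

       (parallel [(set (mem:SI (result+0)) (reg:SI r0))
                  (set (mem:DF (result+8)) (reg:DF f0))])

   and with SAVEP zero each SET runs the other way, restoring the
   registers from the block.  */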
8857 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8858
8859 /* Save the state required to perform an untyped call with the same
8860 arguments as were passed to the current function. */
8861
8862 static rtx
8863 expand_builtin_apply_args ()
8864 {
8865 rtx registers;
8866 int size, align, regno;
8867 enum machine_mode mode;
8868
8869 /* Create a block where the arg-pointer, structure value address,
8870 and argument registers can be saved. */
8871 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8872
8873 /* Walk past the arg-pointer and structure value address. */
8874 size = GET_MODE_SIZE (Pmode);
8875 if (struct_value_rtx)
8876 size += GET_MODE_SIZE (Pmode);
8877
8878 /* Save each register used in calling a function to the block. */
8879 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8880 if ((mode = apply_args_mode[regno]) != VOIDmode)
8881 {
8882 rtx tem;
8883
8884 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8885 if (size % align != 0)
8886 size = CEIL (size, align) * align;
8887
8888 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8889
8890 #ifdef STACK_REGS
8891 /* For reg-stack.c's stack register housekeeping.
8892 Compare with a similar piece of code in function.c. */
8893
8894 emit_insn (gen_rtx (USE, mode, tem));
8895 #endif
8896
8897 emit_move_insn (change_address (registers, mode,
8898 plus_constant (XEXP (registers, 0),
8899 size)),
8900 tem);
8901 size += GET_MODE_SIZE (mode);
8902 }
8903
8904 /* Save the arg pointer to the block. */
8905 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8906 copy_to_reg (virtual_incoming_args_rtx));
8907 size = GET_MODE_SIZE (Pmode);
8908
8909 /* Save the structure value address unless this is passed as an
8910 "invisible" first argument. */
8911 if (struct_value_incoming_rtx)
8912 {
8913 emit_move_insn (change_address (registers, Pmode,
8914 plus_constant (XEXP (registers, 0),
8915 size)),
8916 copy_to_reg (struct_value_incoming_rtx));
8917 size += GET_MODE_SIZE (Pmode);
8918 }
8919
8920 /* Return the address of the block. */
8921 return copy_addr_to_reg (XEXP (registers, 0));
8922 }
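/* This expander and the two that follow implement the GNU C builtins
   for forwarding a call without knowing its arguments.  A minimal
   sketch of their use, where `target_fn' and the argument-block size
   64 are placeholders chosen by the user, not values this file
   supplies:

       void wrapper ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*)()) target_fn,
                                         args, 64);
         __builtin_return (result);
       }
*/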
8923
8924 /* Perform an untyped call and save the state required to perform an
8925 untyped return of whatever value was returned by the given function. */
8926
8927 static rtx
8928 expand_builtin_apply (function, arguments, argsize)
8929 rtx function, arguments, argsize;
8930 {
8931 int size, align, regno;
8932 enum machine_mode mode;
8933 rtx incoming_args, result, reg, dest, call_insn;
8934 rtx old_stack_level = 0;
8935 rtx call_fusage = 0;
8936
8937 /* Create a block where the return registers can be saved. */
8938 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8939
8940 /* ??? The argsize value should be adjusted here. */
8941
8942 /* Fetch the arg pointer from the ARGUMENTS block. */
8943 incoming_args = gen_reg_rtx (Pmode);
8944 emit_move_insn (incoming_args,
8945 gen_rtx (MEM, Pmode, arguments));
8946 #ifndef STACK_GROWS_DOWNWARD
8947 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8948 incoming_args, 0, OPTAB_LIB_WIDEN);
8949 #endif
8950
8951 /* Perform postincrements before actually calling the function. */
8952 emit_queue ();
8953
8954 /* Push a new argument block and copy the arguments. */
8955 do_pending_stack_adjust ();
8956 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8957
8958 /* Push a block of memory onto the stack to store the memory arguments.
8959 Save the address in a register, and copy the memory arguments. ??? I
8960 haven't figured out how the calling convention macros affect this,
8961 but it's likely that the source and/or destination addresses in
8962 the block copy will need updating in machine-specific ways. */
8963 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8964 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8965 gen_rtx (MEM, BLKmode, incoming_args),
8966 argsize,
8967 PARM_BOUNDARY / BITS_PER_UNIT);
8968
8969 /* Refer to the argument block. */
8970 apply_args_size ();
8971 arguments = gen_rtx (MEM, BLKmode, arguments);
8972
8973 /* Walk past the arg-pointer and structure value address. */
8974 size = GET_MODE_SIZE (Pmode);
8975 if (struct_value_rtx)
8976 size += GET_MODE_SIZE (Pmode);
8977
8978 /* Restore each of the registers previously saved. Make USE insns
8979 for each of these registers for use in making the call. */
8980 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8981 if ((mode = apply_args_mode[regno]) != VOIDmode)
8982 {
8983 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8984 if (size % align != 0)
8985 size = CEIL (size, align) * align;
8986 reg = gen_rtx (REG, mode, regno);
8987 emit_move_insn (reg,
8988 change_address (arguments, mode,
8989 plus_constant (XEXP (arguments, 0),
8990 size)));
8991
8992 use_reg (&call_fusage, reg);
8993 size += GET_MODE_SIZE (mode);
8994 }
8995
8996 /* Restore the structure value address unless this is passed as an
8997 "invisible" first argument. */
8998 size = GET_MODE_SIZE (Pmode);
8999 if (struct_value_rtx)
9000 {
9001 rtx value = gen_reg_rtx (Pmode);
9002 emit_move_insn (value,
9003 change_address (arguments, Pmode,
9004 plus_constant (XEXP (arguments, 0),
9005 size)));
9006 emit_move_insn (struct_value_rtx, value);
9007 if (GET_CODE (struct_value_rtx) == REG)
9008 use_reg (&call_fusage, struct_value_rtx);
9009 size += GET_MODE_SIZE (Pmode);
9010 }
9011
9012 /* All arguments and registers used for the call are set up by now! */
9013 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9014
9015 /* Ensure the address is valid; a SYMBOL_REF is already valid, and we
9016 don't want to load it into a register as an optimization, because
9017 prepare_call_address already did that if it should be done. */
9018 if (GET_CODE (function) != SYMBOL_REF)
9019 function = memory_address (FUNCTION_MODE, function);
9020
9021 /* Generate the actual call instruction and save the return value. */
9022 #ifdef HAVE_untyped_call
9023 if (HAVE_untyped_call)
9024 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9025 result, result_vector (1, result)));
9026 else
9027 #endif
9028 #ifdef HAVE_call_value
9029 if (HAVE_call_value)
9030 {
9031 rtx valreg = 0;
9032
9033 /* Locate the unique return register. It is not possible to
9034 express a call that sets more than one return register using
9035 call_value; use untyped_call for that. In fact, untyped_call
9036 only needs to save the return registers in the given block. */
9037 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9038 if ((mode = apply_result_mode[regno]) != VOIDmode)
9039 {
9040 if (valreg)
9041 abort (); /* HAVE_untyped_call required. */
9042 valreg = gen_rtx (REG, mode, regno);
9043 }
9044
9045 emit_call_insn (gen_call_value (valreg,
9046 gen_rtx (MEM, FUNCTION_MODE, function),
9047 const0_rtx, NULL_RTX, const0_rtx));
9048
9049 emit_move_insn (change_address (result, GET_MODE (valreg),
9050 XEXP (result, 0)),
9051 valreg);
9052 }
9053 else
9054 #endif
9055 abort ();
9056
9057 /* Find the CALL insn we just emitted. */
9058 for (call_insn = get_last_insn ();
9059 call_insn && GET_CODE (call_insn) != CALL_INSN;
9060 call_insn = PREV_INSN (call_insn))
9061 ;
9062
9063 if (! call_insn)
9064 abort ();
9065
9066 /* Put the register usage information on the CALL. If there is already
9067 some usage information, put ours at the end. */
9068 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9069 {
9070 rtx link;
9071
9072 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9073 link = XEXP (link, 1))
9074 ;
9075
9076 XEXP (link, 1) = call_fusage;
9077 }
9078 else
9079 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9080
9081 /* Restore the stack. */
9082 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9083
9084 /* Return the address of the result block. */
9085 return copy_addr_to_reg (XEXP (result, 0));
9086 }
9087
9088 /* Perform an untyped return. */
9089
9090 static void
9091 expand_builtin_return (result)
9092 rtx result;
9093 {
9094 int size, align, regno;
9095 enum machine_mode mode;
9096 rtx reg;
9097 rtx call_fusage = 0;
9098
9099 apply_result_size ();
9100 result = gen_rtx (MEM, BLKmode, result);
9101
9102 #ifdef HAVE_untyped_return
9103 if (HAVE_untyped_return)
9104 {
9105 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9106 emit_barrier ();
9107 return;
9108 }
9109 #endif
9110
9111 /* Restore the return value and note that each value is used. */
9112 size = 0;
9113 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9114 if ((mode = apply_result_mode[regno]) != VOIDmode)
9115 {
9116 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9117 if (size % align != 0)
9118 size = CEIL (size, align) * align;
9119 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9120 emit_move_insn (reg,
9121 change_address (result, mode,
9122 plus_constant (XEXP (result, 0),
9123 size)));
9124
9125 push_to_sequence (call_fusage);
9126 emit_insn (gen_rtx (USE, VOIDmode, reg));
9127 call_fusage = get_insns ();
9128 end_sequence ();
9129 size += GET_MODE_SIZE (mode);
9130 }
9131
9132 /* Put the USE insns before the return. */
9133 emit_insns (call_fusage);
9134
9135 /* Return whatever value was restored by jumping directly to the end
9136 of the function. */
9137 expand_null_return ();
9138 }
9139 \f
9140 /* Expand code for a post- or pre-increment or decrement
9141 and return the RTX for the result.
9142 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9143
9144 static rtx
9145 expand_increment (exp, post)
9146 register tree exp;
9147 int post;
9148 {
9149 register rtx op0, op1;
9150 register rtx temp, value;
9151 register tree incremented = TREE_OPERAND (exp, 0);
9152 optab this_optab = add_optab;
9153 int icode;
9154 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9155 int op0_is_copy = 0;
9156 int single_insn = 0;
9157 /* 1 means we can't store into OP0 directly,
9158 because it is a subreg narrower than a word,
9159 and we don't dare clobber the rest of the word. */
9160 int bad_subreg = 0;
9161
9162 if (output_bytecode)
9163 {
9164 bc_expand_expr (exp);
9165 return NULL_RTX;
9166 }
9167
9168 /* Stabilize any component ref that might need to be
9169 evaluated more than once below. */
9170 if (!post
9171 || TREE_CODE (incremented) == BIT_FIELD_REF
9172 || (TREE_CODE (incremented) == COMPONENT_REF
9173 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9174 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9175 incremented = stabilize_reference (incremented);
9176 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9177 ones into SAVE_EXPRs so that they don't accidentally get evaluated
9178 more than once by the code below. */
9179 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9180 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9181 incremented = save_expr (incremented);
9182
9183 /* Compute the operands as RTX.
9184 Note whether OP0 is the actual lvalue or a copy of it:
9185 I believe it is a copy iff it is a register or subreg
9186 and insns were generated in computing it. */
9187
9188 temp = get_last_insn ();
9189 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9190
9191 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9192 in place but instead must do sign- or zero-extension during assignment,
9193 so we copy it into a new register and let the code below use it as
9194 a copy.
9195
9196 Note that we can safely modify this SUBREG since it is known not to be
9197 shared (it was made by the expand_expr call above). */
9198
9199 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9200 {
9201 if (post)
9202 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9203 else
9204 bad_subreg = 1;
9205 }
9206 else if (GET_CODE (op0) == SUBREG
9207 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9208 {
9209 /* We cannot increment this SUBREG in place. If we are
9210 post-incrementing, get a copy of the old value. Otherwise,
9211 just mark that we cannot increment in place. */
9212 if (post)
9213 op0 = copy_to_reg (op0);
9214 else
9215 bad_subreg = 1;
9216 }
9217
9218 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9219 && temp != get_last_insn ());
9220 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9221
9222 /* Decide whether incrementing or decrementing. */
9223 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9224 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9225 this_optab = sub_optab;
9226
9227 /* Convert decrement by a constant into a negative increment. */
9228 if (this_optab == sub_optab
9229 && GET_CODE (op1) == CONST_INT)
9230 {
9231 op1 = GEN_INT (- INTVAL (op1));
9232 this_optab = add_optab;
9233 }
9234
9235 /* For a preincrement, see if we can do this with a single instruction. */
9236 if (!post)
9237 {
9238 icode = (int) this_optab->handlers[(int) mode].insn_code;
9239 if (icode != (int) CODE_FOR_nothing
9240 /* Make sure that OP0 is valid for operands 0 and 1
9241 of the insn we want to queue. */
9242 && (*insn_operand_predicate[icode][0]) (op0, mode)
9243 && (*insn_operand_predicate[icode][1]) (op0, mode)
9244 && (*insn_operand_predicate[icode][2]) (op1, mode))
9245 single_insn = 1;
9246 }
9247
9248 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9249 then we cannot just increment OP0. We must therefore contrive to
9250 increment the original value. Then, for postincrement, we can return
9251 OP0 since it is a copy of the old value. For preincrement, expand here
9252 unless we can do it with a single insn.
9253
9254 Likewise if storing directly into OP0 would clobber high bits
9255 we need to preserve (bad_subreg). */
9256 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9257 {
9258 /* This is the easiest way to increment the value wherever it is.
9259 Problems with multiple evaluation of INCREMENTED are prevented
9260 because either (1) it is a component_ref or preincrement,
9261 in which case it was stabilized above, or (2) it is an array_ref
9262 with constant index in an array in a register, which is
9263 safe to reevaluate. */
9264 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9265 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9266 ? MINUS_EXPR : PLUS_EXPR),
9267 TREE_TYPE (exp),
9268 incremented,
9269 TREE_OPERAND (exp, 1));
9270
9271 while (TREE_CODE (incremented) == NOP_EXPR
9272 || TREE_CODE (incremented) == CONVERT_EXPR)
9273 {
9274 newexp = convert (TREE_TYPE (incremented), newexp);
9275 incremented = TREE_OPERAND (incremented, 0);
9276 }
9277
9278 temp = expand_assignment (incremented, newexp, ! post, 0);
9279 return post ? op0 : temp;
9280 }
9281
9282 if (post)
9283 {
9284 /* We have a true reference to the value in OP0.
9285 If there is an insn to add or subtract in this mode, queue it.
9286 Queueing the increment insn avoids the register shuffling
9287 that often results if we must increment now and first save
9288 the old value for subsequent use. */
9289
9290 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9291 op0 = stabilize (op0);
9292 #endif
9293
9294 icode = (int) this_optab->handlers[(int) mode].insn_code;
9295 if (icode != (int) CODE_FOR_nothing
9296 /* Make sure that OP0 is valid for operands 0 and 1
9297 of the insn we want to queue. */
9298 && (*insn_operand_predicate[icode][0]) (op0, mode)
9299 && (*insn_operand_predicate[icode][1]) (op0, mode))
9300 {
9301 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9302 op1 = force_reg (mode, op1);
9303
9304 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9305 }
9306 }
9307
9308 /* Preincrement, or we can't increment with one simple insn. */
9309 if (post)
9310 /* Save a copy of the value before inc or dec, to return it later. */
9311 temp = value = copy_to_reg (op0);
9312 else
9313 /* Arrange to return the incremented value. */
9314 /* Copy the rtx because expand_binop will protect from the queue,
9315 and the results of that would be invalid for us to return
9316 if our caller does emit_queue before using our result. */
9317 temp = copy_rtx (value = op0);
9318
9319 /* Increment however we can. */
9320 op1 = expand_binop (mode, this_optab, value, op1, op0,
9321 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9322 /* Make sure the value is stored into OP0. */
9323 if (op1 != op0)
9324 emit_move_insn (op0, op1);
9325
9326 return temp;
9327 }
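/* For intuition, a rough sketch of the main cases above for `int i'
   living in a register (the exact insns depend on the target):

     ++i  (post == 0):  emit `i = i + 1' now and return I's rtx,
                        which then holds the new value.
     i++  (post == 1):  if a suitable add insn exists, enqueue the
                        increment and return a QUEUED rtx standing for
                        I's old value; otherwise copy the old value
                        into a temporary, increment I now, and return
                        the copy.  */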
9328 \f
9329 /* Expand all function calls contained within EXP, innermost ones first.
9330 But don't look within expressions that have sequence points.
9331 For each CALL_EXPR, record the rtx for its value
9332 in the CALL_EXPR_RTL field. */
9333
9334 static void
9335 preexpand_calls (exp)
9336 tree exp;
9337 {
9338 register int nops, i;
9339 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9340
9341 if (! do_preexpand_calls)
9342 return;
9343
9344 /* Only expressions and references can contain calls. */
9345
9346 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9347 return;
9348
9349 switch (TREE_CODE (exp))
9350 {
9351 case CALL_EXPR:
9352 /* Do nothing if already expanded. */
9353 if (CALL_EXPR_RTL (exp) != 0)
9354 return;
9355
9356 /* Do nothing to built-in functions. */
9357 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
9358 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
9359 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9360 /* Do nothing if the call returns a variable-sized object. */
9361 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9362 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9363 return;
9364
9365 case COMPOUND_EXPR:
9366 case COND_EXPR:
9367 case TRUTH_ANDIF_EXPR:
9368 case TRUTH_ORIF_EXPR:
9369 /* If we find one of these, then we can be sure
9370 the adjust will be done for it (since it makes jumps).
9371 Do it now, so that if this is inside an argument
9372 of a function, we don't get the stack adjustment
9373 after some other args have already been pushed. */
9374 do_pending_stack_adjust ();
9375 return;
9376
9377 case BLOCK:
9378 case RTL_EXPR:
9379 case WITH_CLEANUP_EXPR:
9380 case CLEANUP_POINT_EXPR:
9381 return;
9382
9383 case SAVE_EXPR:
9384 if (SAVE_EXPR_RTL (exp) != 0)
9385 return;
9386 }
9387
9388 nops = tree_code_length[(int) TREE_CODE (exp)];
9389 for (i = 0; i < nops; i++)
9390 if (TREE_OPERAND (exp, i) != 0)
9391 {
9392 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9393 if (type == 'e' || type == '<' || type == '1' || type == '2'
9394 || type == 'r')
9395 preexpand_calls (TREE_OPERAND (exp, i));
9396 }
9397 }
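/* One motivation, somewhat simplified: in an expression such as
   `x = f () + g ()', expanding both calls here first lands each
   result in its own pseudo before any code for the addition is
   emitted, so no partial result of the addition is live across
   either call; the addition is then expanded over the recorded
   CALL_EXPR_RTL values.  */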
9398 \f
9399 /* At the start of a function, record that we have no previously-pushed
9400 arguments waiting to be popped. */
9401
9402 void
9403 init_pending_stack_adjust ()
9404 {
9405 pending_stack_adjust = 0;
9406 }
9407
9408 /* When exiting from a function, if safe, clear out any pending stack adjust
9409 so the adjustment won't get done. */
9410
9411 void
9412 clear_pending_stack_adjust ()
9413 {
9414 #ifdef EXIT_IGNORE_STACK
9415 if (optimize > 0
9416 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9417 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9418 && ! flag_inline_functions)
9419 pending_stack_adjust = 0;
9420 #endif
9421 }
9422
9423 /* Pop any previously-pushed arguments that have not been popped yet. */
9424
9425 void
9426 do_pending_stack_adjust ()
9427 {
9428 if (inhibit_defer_pop == 0)
9429 {
9430 if (pending_stack_adjust != 0)
9431 adjust_stack (GEN_INT (pending_stack_adjust));
9432 pending_stack_adjust = 0;
9433 }
9434 }
9435
9436 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9437 Returns the cleanups to be performed. */
9438
9439 static tree
9440 defer_cleanups_to (old_cleanups)
9441 tree old_cleanups;
9442 {
9443 tree new_cleanups = NULL_TREE;
9444 tree cleanups = cleanups_this_call;
9445 tree last = NULL_TREE;
9446
9447 while (cleanups_this_call != old_cleanups)
9448 {
9449 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9450 last = cleanups_this_call;
9451 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9452 }
9453
9454 if (last)
9455 {
9456 /* Remove the list from the chain of cleanups. */
9457 TREE_CHAIN (last) = NULL_TREE;
9458
9459 /* Reverse them so that we can build them in the right order. */
9460 cleanups = nreverse (cleanups);
9461
9462 /* All cleanups must be on the function_obstack. */
9463 push_obstacks_nochange ();
9464 resume_temporary_allocation ();
9465
9466 while (cleanups)
9467 {
9468 if (new_cleanups)
9469 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9470 TREE_VALUE (cleanups), new_cleanups);
9471 else
9472 new_cleanups = TREE_VALUE (cleanups);
9473
9474 cleanups = TREE_CHAIN (cleanups);
9475 }
9476
9477 pop_obstacks ();
9478 }
9479
9480 return new_cleanups;
9481 }
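/* Illustrative example: if cleanups c1, c2, c3 were pushed onto
   cleanups_this_call in that order, the loop above builds the single
   expression (c3, (c2, c1)) out of COMPOUND_EXPRs, so expanding the
   returned tree later runs the cleanups newest-first, just as
   expanding the original chain would have.  */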
9482
9483 /* Expand all cleanups up to OLD_CLEANUPS.
9484 Needed here, and also for language-dependent calls. */
9485
9486 void
9487 expand_cleanups_to (old_cleanups)
9488 tree old_cleanups;
9489 {
9490 while (cleanups_this_call != old_cleanups)
9491 {
9492 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9493 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9494 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9495 }
9496 }
9497 \f
9498 /* Expand conditional expressions. */
9499
9500 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9501 LABEL is an rtx of code CODE_LABEL, in this function and all the
9502 functions here. */
9503
9504 void
9505 jumpifnot (exp, label)
9506 tree exp;
9507 rtx label;
9508 {
9509 do_jump (exp, label, NULL_RTX);
9510 }
9511
9512 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9513
9514 void
9515 jumpif (exp, label)
9516 tree exp;
9517 rtx label;
9518 {
9519 do_jump (exp, NULL_RTX, label);
9520 }
9521
9522 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9523 the result is zero, or IF_TRUE_LABEL if the result is one.
9524 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9525 meaning fall through in that case.
9526
9527 do_jump always does any pending stack adjust except when it does not
9528 actually perform a jump. An example where there is no jump
9529 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9530
9531 This function is responsible for optimizing cases such as
9532 &&, || and comparison operators in EXP. */
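
   For example (an illustrative sketch), for `if (a && b) f ();' the
   front end hands do_jump the TRUTH_ANDIF_EXPR with IF_FALSE_LABEL set
   to the label following the call, producing roughly

       if (a == 0) goto false_label;
       if (b == 0) goto false_label;
       call f
     false_label:

   so B is never evaluated when A is zero.  */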
9533
9534 void
9535 do_jump (exp, if_false_label, if_true_label)
9536 tree exp;
9537 rtx if_false_label, if_true_label;
9538 {
9539 register enum tree_code code = TREE_CODE (exp);
9540 /* Some cases need to create a label to jump to
9541 in order to properly fall through.
9542 These cases set DROP_THROUGH_LABEL nonzero. */
9543 rtx drop_through_label = 0;
9544 rtx temp;
9545 rtx comparison = 0;
9546 int i;
9547 tree type;
9548 enum machine_mode mode;
9549
9550 emit_queue ();
9551
9552 switch (code)
9553 {
9554 case ERROR_MARK:
9555 break;
9556
9557 case INTEGER_CST:
9558 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9559 if (temp)
9560 emit_jump (temp);
9561 break;
9562
9563 #if 0
9564 /* This is not true with #pragma weak. */
9565 case ADDR_EXPR:
9566 /* The address of something can never be zero. */
9567 if (if_true_label)
9568 emit_jump (if_true_label);
9569 break;
9570 #endif
9571
9572 case NOP_EXPR:
9573 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9574 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9575 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9576 goto normal;
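      /* Fall through.  */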
9577 case CONVERT_EXPR:
9578 /* If we are narrowing the operand, we have to do the compare in the
9579 narrower mode. */
9580 if ((TYPE_PRECISION (TREE_TYPE (exp))
9581 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9582 goto normal;
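      /* Fall through.  */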
9583 case NON_LVALUE_EXPR:
9584 case REFERENCE_EXPR:
9585 case ABS_EXPR:
9586 case NEGATE_EXPR:
9587 case LROTATE_EXPR:
9588 case RROTATE_EXPR:
9589 /* These cannot change zero->non-zero or vice versa. */
9590 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9591 break;
9592
9593 #if 0
9594 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9595 a test, and can take more if the test is eliminated. */
9596 case PLUS_EXPR:
9597 /* Reduce to minus. */
9598 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9599 TREE_OPERAND (exp, 0),
9600 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9601 TREE_OPERAND (exp, 1))));
9602 /* Process as MINUS. */
9603 #endif
9604
9605 case MINUS_EXPR:
9606 /* Non-zero iff operands of minus differ. */
9607 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9608 TREE_OPERAND (exp, 0),
9609 TREE_OPERAND (exp, 1)),
9610 NE, NE);
9611 break;
9612
9613 case BIT_AND_EXPR:
9614 /* If we are AND'ing with a small constant, do this comparison in the
9615 smallest type that fits. If the machine doesn't have comparisons
9616 that small, it will be converted back to the wider comparison.
9617 This helps if we are testing the sign bit of a narrower object.
9618 combine can't do this for us because it can't know whether a
9619 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9620
9621 if (! SLOW_BYTE_ACCESS
9622 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9623 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9624 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9625 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9626 && (type = type_for_mode (mode, 1)) != 0
9627 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9628 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9629 != CODE_FOR_nothing))
9630 {
9631 do_jump (convert (type, exp), if_false_label, if_true_label);
9632 break;
9633 }
9634 goto normal;
9635
9636 case TRUTH_NOT_EXPR:
9637 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9638 break;
9639
9640 case TRUTH_ANDIF_EXPR:
9641 {
9642 rtx seq1, seq2;
9643 tree cleanups, old_cleanups;
9644
9645 if (if_false_label == 0)
9646 if_false_label = drop_through_label = gen_label_rtx ();
9647 start_sequence ();
9648 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9649 seq1 = get_insns ();
9650 end_sequence ();
9651
9652 old_cleanups = cleanups_this_call;
9653 start_sequence ();
9654 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9655 seq2 = get_insns ();
9656 end_sequence ();
9657
9658 cleanups = defer_cleanups_to (old_cleanups);
9659 if (cleanups)
9660 {
9661 rtx flag = gen_reg_rtx (word_mode);
9662 tree new_cleanups;
9663 tree cond;
9664
9665 /* Flag cleanups as not needed. */
9666 emit_move_insn (flag, const0_rtx);
9667 emit_insns (seq1);
9668
9669 /* Flag cleanups as needed. */
9670 emit_move_insn (flag, const1_rtx);
9671 emit_insns (seq2);
9672
9673 /* All cleanups must be on the function_obstack. */
9674 push_obstacks_nochange ();
9675 resume_temporary_allocation ();
9676
9677 /* Convert flag, which is an rtx, into a tree. */
9678 cond = make_node (RTL_EXPR);
9679 TREE_TYPE (cond) = integer_type_node;
9680 RTL_EXPR_RTL (cond) = flag;
9681 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9682 cond = save_expr (cond);
9683
9684 new_cleanups = build (COND_EXPR, void_type_node,
9685 truthvalue_conversion (cond),
9686 cleanups, integer_zero_node);
9687 new_cleanups = fold (new_cleanups);
9688
9689 pop_obstacks ();
9690
9691 /* Now add in the conditionalized cleanups. */
9692 cleanups_this_call
9693 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9694 (*interim_eh_hook) (NULL_TREE);
9695 }
9696 else
9697 {
9698 emit_insns (seq1);
9699 emit_insns (seq2);
9700 }
9701 }
9702 break;
9703
9704 case TRUTH_ORIF_EXPR:
9705 {
9706 rtx seq1, seq2;
9707 tree cleanups, old_cleanups;
9708
9709 if (if_true_label == 0)
9710 if_true_label = drop_through_label = gen_label_rtx ();
9711 start_sequence ();
9712 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9713 seq1 = get_insns ();
9714 end_sequence ();
9715
9716 old_cleanups = cleanups_this_call;
9717 start_sequence ();
9718 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9719 seq2 = get_insns ();
9720 end_sequence ();
9721
9722 cleanups = defer_cleanups_to (old_cleanups);
9723 if (cleanups)
9724 {
9725 rtx flag = gen_reg_rtx (word_mode);
9726 tree new_cleanups;
9727 tree cond;
9728
9729 /* Flag cleanups as not needed. */
9730 emit_move_insn (flag, const0_rtx);
9731 emit_insns (seq1);
9732
9733 /* Flag cleanups as needed. */
9734 emit_move_insn (flag, const1_rtx);
9735 emit_insns (seq2);
9736
9737 /* All cleanups must be on the function_obstack. */
9738 push_obstacks_nochange ();
9739 resume_temporary_allocation ();
9740
9741 /* Convert flag, which is an rtx, into a tree. */
9742 cond = make_node (RTL_EXPR);
9743 TREE_TYPE (cond) = integer_type_node;
9744 RTL_EXPR_RTL (cond) = flag;
9745 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9746 cond = save_expr (cond);
9747
9748 new_cleanups = build (COND_EXPR, void_type_node,
9749 truthvalue_conversion (cond),
9750 cleanups, integer_zero_node);
9751 new_cleanups = fold (new_cleanups);
9752
9753 pop_obstacks ();
9754
9755 /* Now add in the conditionalized cleanups. */
9756 cleanups_this_call
9757 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9758 (*interim_eh_hook) (NULL_TREE);
9759 }
9760 else
9761 {
9762 emit_insns (seq1);
9763 emit_insns (seq2);
9764 }
9765 }
9766 break;
9767
9768 case COMPOUND_EXPR:
9769 push_temp_slots ();
9770 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9771 free_temp_slots ();
9772 pop_temp_slots ();
9773 emit_queue ();
9774 do_pending_stack_adjust ();
9775 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9776 break;
9777
9778 case COMPONENT_REF:
9779 case BIT_FIELD_REF:
9780 case ARRAY_REF:
9781 {
9782 int bitsize, bitpos, unsignedp;
9783 enum machine_mode mode;
9784 tree type;
9785 tree offset;
9786 int volatilep = 0;
9787
9788 /* Get description of this reference. We don't actually care
9789 about the underlying object here. */
9790 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9791 &mode, &unsignedp, &volatilep);
9792
9793 type = type_for_size (bitsize, unsignedp);
9794 if (! SLOW_BYTE_ACCESS
9795 && type != 0 && bitsize >= 0
9796 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9797 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9798 != CODE_FOR_nothing))
9799 {
9800 do_jump (convert (type, exp), if_false_label, if_true_label);
9801 break;
9802 }
9803 goto normal;
9804 }
9805
9806 case COND_EXPR:
9807 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9808 if (integer_onep (TREE_OPERAND (exp, 1))
9809 && integer_zerop (TREE_OPERAND (exp, 2)))
9810 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9811
9812 else if (integer_zerop (TREE_OPERAND (exp, 1))
9813 && integer_onep (TREE_OPERAND (exp, 2)))
9814 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9815
9816 else
9817 {
9818 register rtx label1 = gen_label_rtx ();
9819 drop_through_label = gen_label_rtx ();
9820 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9821 /* Now the THEN-expression. */
9822 do_jump (TREE_OPERAND (exp, 1),
9823 if_false_label ? if_false_label : drop_through_label,
9824 if_true_label ? if_true_label : drop_through_label);
9825 /* In case the do_jump just above never jumps. */
9826 do_pending_stack_adjust ();
9827 emit_label (label1);
9828 /* Now the ELSE-expression. */
9829 do_jump (TREE_OPERAND (exp, 2),
9830 if_false_label ? if_false_label : drop_through_label,
9831 if_true_label ? if_true_label : drop_through_label);
9832 }
9833 break;
9834
9835 case EQ_EXPR:
9836 {
9837 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9838
9839 if (integer_zerop (TREE_OPERAND (exp, 1)))
9840 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9841 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9842 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9843 do_jump
9844 (fold
9845 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9846 fold (build (EQ_EXPR, TREE_TYPE (exp),
9847 fold (build1 (REALPART_EXPR,
9848 TREE_TYPE (inner_type),
9849 TREE_OPERAND (exp, 0))),
9850 fold (build1 (REALPART_EXPR,
9851 TREE_TYPE (inner_type),
9852 TREE_OPERAND (exp, 1))))),
9853 fold (build (EQ_EXPR, TREE_TYPE (exp),
9854 fold (build1 (IMAGPART_EXPR,
9855 TREE_TYPE (inner_type),
9856 TREE_OPERAND (exp, 0))),
9857 fold (build1 (IMAGPART_EXPR,
9858 TREE_TYPE (inner_type),
9859 TREE_OPERAND (exp, 1))))))),
9860 if_false_label, if_true_label);
9861 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9862 && !can_compare_p (TYPE_MODE (inner_type)))
9863 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9864 else
9865 comparison = compare (exp, EQ, EQ);
9866 break;
9867 }
9868
9869 case NE_EXPR:
9870 {
9871 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9872
9873 if (integer_zerop (TREE_OPERAND (exp, 1)))
9874 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9875 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9876 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9877 do_jump
9878 (fold
9879 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9880 fold (build (NE_EXPR, TREE_TYPE (exp),
9881 fold (build1 (REALPART_EXPR,
9882 TREE_TYPE (inner_type),
9883 TREE_OPERAND (exp, 0))),
9884 fold (build1 (REALPART_EXPR,
9885 TREE_TYPE (inner_type),
9886 TREE_OPERAND (exp, 1))))),
9887 fold (build (NE_EXPR, TREE_TYPE (exp),
9888 fold (build1 (IMAGPART_EXPR,
9889 TREE_TYPE (inner_type),
9890 TREE_OPERAND (exp, 0))),
9891 fold (build1 (IMAGPART_EXPR,
9892 TREE_TYPE (inner_type),
9893 TREE_OPERAND (exp, 1))))))),
9894 if_false_label, if_true_label);
9895 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9896 && !can_compare_p (TYPE_MODE (inner_type)))
9897 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9898 else
9899 comparison = compare (exp, NE, NE);
9900 break;
9901 }
9902
9903 case LT_EXPR:
9904 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9905 == MODE_INT)
9906 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9907 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9908 else
9909 comparison = compare (exp, LT, LTU);
9910 break;
9911
9912 case LE_EXPR:
9913 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9914 == MODE_INT)
9915 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9916 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9917 else
9918 comparison = compare (exp, LE, LEU);
9919 break;
9920
9921 case GT_EXPR:
9922 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9923 == MODE_INT)
9924 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9925 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9926 else
9927 comparison = compare (exp, GT, GTU);
9928 break;
9929
9930 case GE_EXPR:
9931 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9932 == MODE_INT)
9933 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9934 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9935 else
9936 comparison = compare (exp, GE, GEU);
9937 break;
9938
9939 default:
9940 normal:
9941 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9942 #if 0
9943 /* This is no longer needed and produces poor code, since it makes
9944 comparisons and tests from non-SI objects have different code
9945 sequences. */
9946 /* Copy to register to avoid generating bad insns by cse
9947 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9948 if (!cse_not_expected && GET_CODE (temp) == MEM)
9949 temp = copy_to_reg (temp);
9950 #endif
9951 do_pending_stack_adjust ();
9952 if (GET_CODE (temp) == CONST_INT)
9953 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9954 else if (GET_CODE (temp) == LABEL_REF)
9955 comparison = const_true_rtx;
9956 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9957 && !can_compare_p (GET_MODE (temp)))
9958 /* Note swapping the labels gives us not-equal. */
9959 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9960 else if (GET_MODE (temp) != VOIDmode)
9961 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9962 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9963 GET_MODE (temp), NULL_RTX, 0);
9964 else
9965 abort ();
9966 }
9967
9968 /* Do any postincrements in the expression that was tested. */
9969 emit_queue ();
9970
9971 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9972 straight into a conditional jump instruction as the jump condition.
9973 Otherwise, all the work has been done already. */
9974
9975 if (comparison == const_true_rtx)
9976 {
9977 if (if_true_label)
9978 emit_jump (if_true_label);
9979 }
9980 else if (comparison == const0_rtx)
9981 {
9982 if (if_false_label)
9983 emit_jump (if_false_label);
9984 }
9985 else if (comparison)
9986 do_jump_for_compare (comparison, if_false_label, if_true_label);
9987
9988 if (drop_through_label)
9989 {
9990 /* If do_jump produces code that might be jumped around,
9991 do any stack adjusts from that code, before the place
9992 where control merges in. */
9993 do_pending_stack_adjust ();
9994 emit_label (drop_through_label);
9995 }
9996 }
9997 \f
9998 /* Given a comparison expression EXP for values too wide to be compared
9999 with one insn, test the comparison and jump to the appropriate label.
10000 The code of EXP is ignored; we always test GT if SWAP is 0,
10001 and LT if SWAP is 1. */
10002
10003 static void
10004 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10005 tree exp;
10006 int swap;
10007 rtx if_false_label, if_true_label;
10008 {
10009 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10010 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10011 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10012 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10013 rtx drop_through_label = 0;
10014 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10015 int i;
10016
10017 if (! if_true_label || ! if_false_label)
10018 drop_through_label = gen_label_rtx ();
10019 if (! if_true_label)
10020 if_true_label = drop_through_label;
10021 if (! if_false_label)
10022 if_false_label = drop_through_label;
10023
10024 /* Compare a word at a time, high order first. */
10025 for (i = 0; i < nwords; i++)
10026 {
10027 rtx comp;
10028 rtx op0_word, op1_word;
10029
10030 if (WORDS_BIG_ENDIAN)
10031 {
10032 op0_word = operand_subword_force (op0, i, mode);
10033 op1_word = operand_subword_force (op1, i, mode);
10034 }
10035 else
10036 {
10037 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10038 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10039 }
10040
10041 /* All but the high-order word must be compared as unsigned. */
10042 comp = compare_from_rtx (op0_word, op1_word,
10043 (unsignedp || i > 0) ? GTU : GT,
10044 unsignedp, word_mode, NULL_RTX, 0);
10045 if (comp == const_true_rtx)
10046 emit_jump (if_true_label);
10047 else if (comp != const0_rtx)
10048 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10049
10050 /* Consider lower words only if these are equal. */
10051 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10052 NULL_RTX, 0);
10053 if (comp == const_true_rtx)
10054 emit_jump (if_false_label);
10055 else if (comp != const0_rtx)
10056 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10057 }
10058
10059 if (if_false_label)
10060 emit_jump (if_false_label);
10061 if (drop_through_label)
10062 emit_label (drop_through_label);
10063 }
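/* Illustrative sketch of the expansion above for a signed DImode test
   A < B on a hypothetical 32-bit target (SWAP == 1 makes the loop test
   B > A, with OP0 = B and OP1 = A):

       if ((signed) B.high >  A.high)   goto if_true_label;
       if (B.high != A.high)            goto if_false_label;
       if ((unsigned) B.low >  A.low)   goto if_true_label;
       goto if_false_label;

   Only the most significant word is compared signed; the rest use GTU,
   as the (unsignedp || i > 0) test above arranges.  */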
10064
10065 /* Compare OP0 with OP1, word at a time, in mode MODE.
10066 UNSIGNEDP says to do unsigned comparison.
10067 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10068
10069 void
10070 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10071 enum machine_mode mode;
10072 int unsignedp;
10073 rtx op0, op1;
10074 rtx if_false_label, if_true_label;
10075 {
10076 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10077 rtx drop_through_label = 0;
10078 int i;
10079
10080 if (! if_true_label || ! if_false_label)
10081 drop_through_label = gen_label_rtx ();
10082 if (! if_true_label)
10083 if_true_label = drop_through_label;
10084 if (! if_false_label)
10085 if_false_label = drop_through_label;
10086
10087 /* Compare a word at a time, high order first. */
10088 for (i = 0; i < nwords; i++)
10089 {
10090 rtx comp;
10091 rtx op0_word, op1_word;
10092
10093 if (WORDS_BIG_ENDIAN)
10094 {
10095 op0_word = operand_subword_force (op0, i, mode);
10096 op1_word = operand_subword_force (op1, i, mode);
10097 }
10098 else
10099 {
10100 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10101 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10102 }
10103
10104 /* All but the high-order word must be compared as unsigned. */
10105 comp = compare_from_rtx (op0_word, op1_word,
10106 (unsignedp || i > 0) ? GTU : GT,
10107 unsignedp, word_mode, NULL_RTX, 0);
10108 if (comp == const_true_rtx)
10109 emit_jump (if_true_label);
10110 else if (comp != const0_rtx)
10111 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10112
10113 /* Consider lower words only if these are equal. */
10114 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10115 NULL_RTX, 0);
10116 if (comp == const_true_rtx)
10117 emit_jump (if_false_label);
10118 else if (comp != const0_rtx)
10119 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10120 }
10121
10122 if (if_false_label)
10123 emit_jump (if_false_label);
10124 if (drop_through_label)
10125 emit_label (drop_through_label);
10126 }
10127
10128 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10129 with one insn, test the comparison and jump to the appropriate label. */
10130
10131 static void
10132 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10133 tree exp;
10134 rtx if_false_label, if_true_label;
10135 {
10136 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10137 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10138 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10139 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10140 int i;
10141 rtx drop_through_label = 0;
10142
10143 if (! if_false_label)
10144 drop_through_label = if_false_label = gen_label_rtx ();
10145
10146 for (i = 0; i < nwords; i++)
10147 {
10148 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10149 operand_subword_force (op1, i, mode),
10150 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10151 word_mode, NULL_RTX, 0);
10152 if (comp == const_true_rtx)
10153 emit_jump (if_false_label);
10154 else if (comp != const0_rtx)
10155 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10156 }
10157
10158 if (if_true_label)
10159 emit_jump (if_true_label);
10160 if (drop_through_label)
10161 emit_label (drop_through_label);
10162 }
10163 \f
10164 /* Jump according to whether OP0 is 0.
10165 We assume that OP0 has an integer mode that is too wide
10166 for the available compare insns. */
10167
10168 static void
10169 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10170 rtx op0;
10171 rtx if_false_label, if_true_label;
10172 {
10173 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10174 int i;
10175 rtx drop_through_label = 0;
10176
10177 if (! if_false_label)
10178 drop_through_label = if_false_label = gen_label_rtx ();
10179
10180 for (i = 0; i < nwords; i++)
10181 {
10182 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10183 GET_MODE (op0)),
10184 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10185 if (comp == const_true_rtx)
10186 emit_jump (if_false_label);
10187 else if (comp != const0_rtx)
10188 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10189 }
10190
10191 if (if_true_label)
10192 emit_jump (if_true_label);
10193 if (drop_through_label)
10194 emit_label (drop_through_label);
10195 }
10196
10197 /* Given a comparison expression in rtl form, output conditional branches to
10198 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10199
10200 static void
10201 do_jump_for_compare (comparison, if_false_label, if_true_label)
10202 rtx comparison, if_false_label, if_true_label;
10203 {
10204 if (if_true_label)
10205 {
10206 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10207 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10208 else
10209 abort ();
10210
10211 if (if_false_label)
10212 emit_jump (if_false_label);
10213 }
10214 else if (if_false_label)
10215 {
10216 rtx insn;
10217 rtx prev = get_last_insn ();
10218 rtx branch = 0;
10219
10220 /* Output the branch with the opposite condition. Then try to invert
10221 what is generated. If more than one insn is a branch, or if the
10222 branch is not the last insn written, abort. If we can't invert
10223 the branch, make a true label, redirect this jump to it,
10224 emit a jump to the false label, and define the true label. */
10225
10226 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10227 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
10228 else
10229 abort ();
10230
10231 /* Here we get the first insn that was just emitted. It used to be the
10232 case that, on some machines, emitting the branch would discard
10233 the previous compare insn and emit a replacement. This isn't
10234 done anymore, but abort if we see that PREV is deleted. */
10235
10236 if (prev == 0)
10237 insn = get_insns ();
10238 else if (INSN_DELETED_P (prev))
10239 abort ();
10240 else
10241 insn = NEXT_INSN (prev);
10242
10243 for (; insn; insn = NEXT_INSN (insn))
10244 if (GET_CODE (insn) == JUMP_INSN)
10245 {
10246 if (branch)
10247 abort ();
10248 branch = insn;
10249 }
10250
10251 if (branch != get_last_insn ())
10252 abort ();
10253
10254 JUMP_LABEL (branch) = if_false_label;
10255 if (! invert_jump (branch, if_false_label))
10256 {
10257 if_true_label = gen_label_rtx ();
10258 redirect_jump (branch, if_true_label);
10259 emit_jump (if_false_label);
10260 emit_label (if_true_label);
10261 }
10262 }
10263 }
10264 \f
10265 /* Generate code for a comparison expression EXP
10266 (including code to compute the values to be compared)
10267 and set (CC0) according to the result.
10268 SIGNED_CODE should be the rtx operation for this comparison for
10269 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10270
10271 We force a stack adjustment unless there are currently
10272 things pushed on the stack that aren't yet used. */
10273
10274 static rtx
10275 compare (exp, signed_code, unsigned_code)
10276 register tree exp;
10277 enum rtx_code signed_code, unsigned_code;
10278 {
10279 register rtx op0
10280 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10281 register rtx op1
10282 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10283 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10284 register enum machine_mode mode = TYPE_MODE (type);
10285 int unsignedp = TREE_UNSIGNED (type);
10286 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10287
10288 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10289 ((mode == BLKmode)
10290 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10291 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10292 }
10293
10294 /* Like compare but expects the values to compare as two rtx's.
10295 The decision as to signed or unsigned comparison must be made by the caller.
10296
10297 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10298 compared.
10299
10300 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10301 size of MODE should be used. */
10302
10303 rtx
10304 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10305 register rtx op0, op1;
10306 enum rtx_code code;
10307 int unsignedp;
10308 enum machine_mode mode;
10309 rtx size;
10310 int align;
10311 {
10312 rtx tem;
10313
10314 /* If one operand is constant, make it the second one. Only do this
10315 if the other operand is not constant as well. */
10316
10317 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10318 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10319 {
10320 tem = op0;
10321 op0 = op1;
10322 op1 = tem;
10323 code = swap_condition (code);
10324 }
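  /* E.g. (illustrative) a test written `3 < x' arrives here as
     op0 == (const_int 3), op1 == (reg X), code == LT; the swap above
     turns it into (reg X), (const_int 3), GT, so later code can
     assume any constant operand is the second one.  */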
10325
10326 if (flag_force_mem)
10327 {
10328 op0 = force_not_mem (op0);
10329 op1 = force_not_mem (op1);
10330 }
10331
10332 do_pending_stack_adjust ();
10333
10334 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10335 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10336 return tem;
10337
10338 #if 0
10339 /* There's no need to do this now that combine.c can eliminate lots of
10340 sign extensions. This can be less efficient in certain cases on other
10341 machines. */
10342
10343 /* If this is a signed equality comparison, we can do it as an
10344 unsigned comparison since zero-extension is cheaper than sign
10345 extension and comparisons with zero are done as unsigned. This is
10346 the case even on machines that can do fast sign extension, since
10347 zero-extension is easier to combine with other operations than
10348 sign-extension is. If we are comparing against a constant, we must
10349 convert it to what it would look like unsigned. */
10350 if ((code == EQ || code == NE) && ! unsignedp
10351 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10352 {
10353 if (GET_CODE (op1) == CONST_INT
10354 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10355 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10356 unsignedp = 1;
10357 }
10358 #endif
10359
10360 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10361
10362 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10363 }
10364 \f
10365 /* Generate code to calculate EXP using a store-flag instruction
10366 and return an rtx for the result. EXP is either a comparison
10367 or a TRUTH_NOT_EXPR whose operand is a comparison.
10368
10369 If TARGET is nonzero, store the result there if convenient.
10370
10371 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10372 cheap.
10373
10374 Return zero if there is no suitable set-flag instruction
10375 available on this machine.
10376
10377 Once expand_expr has been called on the arguments of the comparison,
10378 we are committed to doing the store flag, since it is not safe to
10379 re-evaluate the expression. We emit the store-flag insn by calling
10380 emit_store_flag, but only expand the arguments if we have a reason
10381 to believe that emit_store_flag will be successful. If we think that
10382 it will, but it isn't, we have to simulate the store-flag with a
10383 set/jump/set sequence. */
10384
10385 static rtx
10386 do_store_flag (exp, target, mode, only_cheap)
10387 tree exp;
10388 rtx target;
10389 enum machine_mode mode;
10390 int only_cheap;
10391 {
10392 enum rtx_code code;
10393 tree arg0, arg1, type;
10394 tree tem;
10395 enum machine_mode operand_mode;
10396 int invert = 0;
10397 int unsignedp;
10398 rtx op0, op1;
10399 enum insn_code icode;
10400 rtx subtarget = target;
10401 rtx result, label, pattern, jump_pat;
10402
10403 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10404 result at the end. We can't simply invert the test since it would
10405 have already been inverted if it were valid. This case occurs for
10406 some floating-point comparisons. */
10407
10408 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10409 invert = 1, exp = TREE_OPERAND (exp, 0);
10410
10411 arg0 = TREE_OPERAND (exp, 0);
10412 arg1 = TREE_OPERAND (exp, 1);
10413 type = TREE_TYPE (arg0);
10414 operand_mode = TYPE_MODE (type);
10415 unsignedp = TREE_UNSIGNED (type);
10416
10417 /* We won't bother with BLKmode store-flag operations because it would mean
10418 passing a lot of information to emit_store_flag. */
10419 if (operand_mode == BLKmode)
10420 return 0;
10421
10422 STRIP_NOPS (arg0);
10423 STRIP_NOPS (arg1);
10424
10425 /* Get the rtx comparison code to use. We know that EXP is a comparison
10426 operation of some type. Some comparisons against 1 and -1 can be
10427 converted to comparisons with zero. Do so here so that the tests
10428 below will be aware that we have a comparison with zero. These
10429 tests will not catch constants in the first operand, but constants
10430 are rarely passed as the first operand. */
10431
10432 switch (TREE_CODE (exp))
10433 {
10434 case EQ_EXPR:
10435 code = EQ;
10436 break;
10437 case NE_EXPR:
10438 code = NE;
10439 break;
10440 case LT_EXPR:
10441 if (integer_onep (arg1))
10442 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10443 else
10444 code = unsignedp ? LTU : LT;
10445 break;
10446 case LE_EXPR:
10447 if (! unsignedp && integer_all_onesp (arg1))
10448 arg1 = integer_zero_node, code = LT;
10449 else
10450 code = unsignedp ? LEU : LE;
10451 break;
10452 case GT_EXPR:
10453 if (! unsignedp && integer_all_onesp (arg1))
10454 arg1 = integer_zero_node, code = GE;
10455 else
10456 code = unsignedp ? GTU : GT;
10457 break;
10458 case GE_EXPR:
10459 if (integer_onep (arg1))
10460 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10461 else
10462 code = unsignedp ? GEU : GE;
10463 break;
10464 default:
10465 abort ();
10466 }
10467
10468 /* Put a constant second. */
10469 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10470 {
10471 tem = arg0; arg0 = arg1; arg1 = tem;
10472 code = swap_condition (code);
10473 }
10474
10475 /* If this is an equality or inequality test of a single bit, we can
10476 do this by shifting the bit being tested to the low-order bit and
10477 masking the result with the constant 1. If the condition was EQ,
10478 we xor it with 1. This does not require an scc insn and is faster
10479 than an scc insn even if we have it. */
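
     Illustrative example: for `(x & 8) != 0' the code below computes
     roughly `(x >> 3) & 1', and for `(x & 8) == 0' it computes
     `((x >> 3) ^ 1) & 1'.  When the tested bit is the sign bit, the
     final AND is omitted and the shift is done unsigned instead.  */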
10480
10481 if ((code == NE || code == EQ)
10482 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10483 && integer_pow2p (TREE_OPERAND (arg0, 1))
10484 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10485 {
10486 tree inner = TREE_OPERAND (arg0, 0);
10487 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10488 NULL_RTX, VOIDmode, 0)));
10489 int ops_unsignedp;
10490
10491 /* If INNER is a right shift by a constant and the shift count plus BITNUM
10492 does not overflow, adjust BITNUM and INNER. */
10493
10494 if (TREE_CODE (inner) == RSHIFT_EXPR
10495 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10496 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10497 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10498 < TYPE_PRECISION (type)))
10499 {
10500 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10501 inner = TREE_OPERAND (inner, 0);
10502 }
10503
10504 /* If we are going to be able to omit the AND below, we must do our
10505 operations as unsigned. If we must use the AND, we have a choice.
10506 Normally unsigned is faster, but for some machines signed is. */
10507 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10508 #ifdef LOAD_EXTEND_OP
10509 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10510 #else
10511 : 1
10512 #endif
10513 );
10514
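/* Use the passed-in target as a subtarget only if it is a register
   of the right mode that evaluating INNER cannot clobber. */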
10515 if (subtarget == 0 || GET_CODE (subtarget) != REG
10516 || GET_MODE (subtarget) != operand_mode
10517 || ! safe_from_p (subtarget, inner))
10518 subtarget = 0;
10519
10520 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10521
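/* Shift the bit being tested down to the low-order bit. */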
10522 if (bitnum != 0)
10523 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10524 size_int (bitnum), subtarget, ops_unsignedp);
10525
10526 if (GET_MODE (op0) != mode)
10527 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10528
10529 if ((code == EQ && ! invert) || (code == NE && invert))
10530 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10531 ops_unsignedp, OPTAB_LIB_WIDEN);
10532
10533 /* Put the AND last so it can combine with more things. */
10534 if (bitnum != TYPE_PRECISION (type) - 1)
10535 op0 = expand_and (op0, const1_rtx, subtarget);
10536
10537 return op0;
10538 }
10539
10540 /* Now see if we are likely to be able to do this. Return if not. */
10541 if (! can_compare_p (operand_mode))
10542 return 0;
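/* Look up the target's scc (store-condition-code) pattern for this
   comparison, if it has one. */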
10543 icode = setcc_gen_code[(int) code];
10544 if (icode == CODE_FOR_nothing
10545 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10546 {
10547 /* We can only do this if it is one of the special cases that
10548 can be handled without an scc insn. */
10549 if ((code == LT && integer_zerop (arg1))
10550 || (! only_cheap && code == GE && integer_zerop (arg1)))
10551 ;
10552 else if (BRANCH_COST >= 0
10553 && ! only_cheap && (code == NE || code == EQ)
10554 && TREE_CODE (type) != REAL_TYPE
10555 && ((abs_optab->handlers[(int) operand_mode].insn_code
10556 != CODE_FOR_nothing)
10557 || (ffs_optab->handlers[(int) operand_mode].insn_code
10558 != CODE_FOR_nothing)))
10559 ;
10560 else
10561 return 0;
10562 }
10563
10564 preexpand_calls (exp);
10565 if (subtarget == 0 || GET_CODE (subtarget) != REG
10566 || GET_MODE (subtarget) != operand_mode
10567 || ! safe_from_p (subtarget, arg1))
10568 subtarget = 0;
10569
10570 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10571 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10572
10573 if (target == 0)
10574 target = gen_reg_rtx (mode);
10575
10576 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10577 because, if emit_store_flag does anything, it will succeed and
10578 OP0 and OP1 will not be used subsequently. */
10579
10580 result = emit_store_flag (target, code,
10581 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10582 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10583 operand_mode, unsignedp, 1);
10584
10585 if (result)
10586 {
10587 if (invert)
10588 result = expand_binop (mode, xor_optab, result, const1_rtx,
10589 result, 0, OPTAB_LIB_WIDEN);
10590 return result;
10591 }
10592
10593 /* If this failed, we have to do this with set/compare/jump/set code. */
10594 if (target == 0 || GET_CODE (target) != REG
10595 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10596 target = gen_reg_rtx (GET_MODE (target));
10597
10598 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10599 result = compare_from_rtx (op0, op1, code, unsignedp,
10600 operand_mode, NULL_RTX, 0);
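/* compare_from_rtx may fold the comparison to a constant, in which
   case the result is known at compile time. */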
10601 if (GET_CODE (result) == CONST_INT)
10602 return (((result == const0_rtx && ! invert)
10603 || (result != const0_rtx && invert))
10604 ? const0_rtx : const1_rtx);
10605
10606 label = gen_label_rtx ();
10607 if (bcc_gen_fctn[(int) code] == 0)
10608 abort ();
10609
10610 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10611 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10612 emit_label (label);
10613
10614 return target;
10615 }
10616 \f
10617 /* Generate a tablejump instruction (used for switch statements). */
10618
10619 #ifdef HAVE_tablejump
10620
10621 /* INDEX is the value being switched on, with the lowest value
10622 in the table already subtracted.
10623 MODE is its expected mode (needed if INDEX is constant).
10624 RANGE is the length of the jump table.
10625 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10626
10627 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10628 index value is out of range. */
10629
10630 void
10631 do_tablejump (index, mode, range, table_label, default_label)
10632 rtx index, range, table_label, default_label;
10633 enum machine_mode mode;
10634 {
10635 register rtx temp, vector;
10636
10637 /* Do an unsigned comparison (in the proper mode) between the index
10638 expression and the value which represents the length of the range.
10639 Since we just finished subtracting the lower bound of the range
10640 from the index expression, this comparison allows us to simultaneously
10641 check that the original index expression value is both greater than
10642 or equal to the minimum value of the range and less than or equal to
10643 the maximum value of the range. */
10644
10645 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10646 emit_jump_insn (gen_bgtu (default_label));
10647
10648 /* If index is in range, it must fit in Pmode.
10649 Convert to Pmode so we can index with it. */
10650 if (mode != Pmode)
10651 index = convert_to_mode (Pmode, index, 1);
10652
10653 /* Don't let a MEM slip through, because then INDEX that comes
10654 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10655 and break_out_memory_refs will go to work on it and mess it up. */
10656 #ifdef PIC_CASE_VECTOR_ADDRESS
10657 if (flag_pic && GET_CODE (index) != REG)
10658 index = copy_to_mode_reg (Pmode, index);
10659 #endif
10660
10661 /* If flag_force_addr were to affect this address
10662 it could interfere with the tricky assumptions made
10663 about addresses that contain label-refs,
10664 which may be valid only very near the tablejump itself. */
10665 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10666 GET_MODE_SIZE, because this indicates how large insns are. The other
10667 uses should all be Pmode, because they are addresses. This code
10668 could fail if addresses and insns are not the same size. */
10669 index = gen_rtx (PLUS, Pmode,
10670 gen_rtx (MULT, Pmode, index,
10671 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10672 gen_rtx (LABEL_REF, Pmode, table_label));
10673 #ifdef PIC_CASE_VECTOR_ADDRESS
10674 if (flag_pic)
10675 index = PIC_CASE_VECTOR_ADDRESS (index);
10676 else
10677 #endif
10678 index = memory_address_noforce (CASE_VECTOR_MODE, index);
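/* Fetch the dispatch-table entry; it is known never to change, so the
   memory reference is marked RTX_UNCHANGING_P. */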
10679 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10680 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10681 RTX_UNCHANGING_P (vector) = 1;
10682 convert_move (temp, vector, 0);
10683
10684 emit_jump_insn (gen_tablejump (temp, table_label));
10685
10686 #ifndef CASE_VECTOR_PC_RELATIVE
10687 /* If we are generating PIC code or if the table is PC-relative, the
10688 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10689 if (! flag_pic)
10690 emit_barrier ();
10691 #endif
10692 }
10693
10694 #endif /* HAVE_tablejump */
10695
10696
10697 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
10698 to that value is on the top of the stack. The resulting type is TYPE, and
10699 the source declaration is DECL. */
10700
10701 void
10702 bc_load_memory (type, decl)
10703 tree type, decl;
10704 {
10705 enum bytecode_opcode opcode;
10706
10707
10708 /* Bit fields are special. We only know about signed and
10709 unsigned ints, and enums; like the ints, enums are loaded
10710 according to their signedness. */
10711
10712 if (DECL_BIT_FIELD (decl))
10713 if (TREE_CODE (type) == ENUMERAL_TYPE
10714 || TREE_CODE (type) == INTEGER_TYPE)
10715 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10716 else
10717 abort ();
10718 else
10719 /* See corresponding comment in bc_store_memory(). */
10720 if (TYPE_MODE (type) == BLKmode
10721 || TYPE_MODE (type) == VOIDmode)
10722 return;
10723 else
10724 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
10725
10726 if (opcode == neverneverland)
10727 abort ();
10728
10729 bc_emit_bytecode (opcode);
10730
10731 #ifdef DEBUG_PRINT_CODE
10732 fputc ('\n', stderr);
10733 #endif
10734 }
10735
10736
10737 /* Store the contents of the second stack slot to the address in the
10738 top stack slot. DECL is the declaration of the destination and is used
10739 to determine whether we're dealing with a bitfield. */
10740
10741 void
10742 bc_store_memory (type, decl)
10743 tree type, decl;
10744 {
10745 enum bytecode_opcode opcode;
10746
10747
10748 if (DECL_BIT_FIELD (decl))
10749 {
10750 if (TREE_CODE (type) == ENUMERAL_TYPE
10751 || TREE_CODE (type) == INTEGER_TYPE)
10752 opcode = sstoreBI;
10753 else
10754 abort ();
10755 }
10756 else
10757 if (TYPE_MODE (type) == BLKmode)
10758 {
10759 /* Copy structure. This expands to a block copy instruction, storeBLK.
10760 In addition to the arguments expected by the other store instructions,
10761 it also expects a type size (SImode) on top of the stack, which is the
10762 structure size in size units (usually bytes). The first two arguments
10763 are already on the stack, so we just put the size on level 1. For some
10764 other languages the size may be variable; this is why we don't encode
10765 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
10766
10767 bc_expand_expr (TYPE_SIZE (type));
10768 opcode = storeBLK;
10769 }
10770 else
10771 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10772
10773 if (opcode == neverneverland)
10774 abort ();
10775
10776 bc_emit_bytecode (opcode);
10777
10778 #ifdef DEBUG_PRINT_CODE
10779 fputc ('\n', stderr);
10780 #endif
10781 }
10782
10783
10784 /* Allocate local stack space sufficient to hold a value of the given
10785 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10786 integral power of 2. A special case is locals of type VOID, which
10787 have size 0 and alignment 1; any such "voidish" SIZE or ALIGNMENT is
10788 remapped to the corresponding attribute of SImode. */
10789
10790 rtx
10791 bc_allocate_local (size, alignment)
10792 int size, alignment;
10793 {
10794 rtx retval;
10795 int byte_alignment;
10796
10797 if (size < 0)
10798 abort ();
10799
10800 /* Normalize size and alignment */
10801 if (!size)
10802 size = UNITS_PER_WORD;
10803
10804 if (alignment < BITS_PER_UNIT)
10805 byte_alignment = 1 << (INT_ALIGN - 1);
10806 else
10807 /* Align */
10808 byte_alignment = alignment / BITS_PER_UNIT;
10809
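/* Round the accumulated local-variable size up to the requested
   byte alignment. */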
10810 if (local_vars_size & (byte_alignment - 1))
10811 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10812
10813 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10814 local_vars_size += size;
10815
10816 return retval;
10817 }
10818
10819
10820 /* Allocate variable-sized local array. Variable-sized arrays are
10821 actually pointers to the address in memory where they are stored. */
10822
10823 rtx
10824 bc_allocate_variable_array (size)
10825 tree size;
10826 {
10827 rtx retval;
10828 const int ptralign = (1 << (PTR_ALIGN - 1));
10829
10830 /* Align pointer */
10831 if (local_vars_size & ptralign)
10832 local_vars_size += ptralign - (local_vars_size & ptralign);
10833
10834 /* Note down local space needed: pointer to block; also return
10835 dummy rtx */
10836
10837 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10838 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10839 return retval;
10840 }
10841
10842
10843 /* Push the machine address for the given external variable offset. */
10844 void
10845 bc_load_externaddr (externaddr)
10846 rtx externaddr;
10847 {
10848 bc_emit_bytecode (constP);
10849 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10850 BYTECODE_BC_LABEL (externaddr)->offset);
10851
10852 #ifdef DEBUG_PRINT_CODE
10853 fputc ('\n', stderr);
10854 #endif
10855 }
10856
10857
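/* Copy S into freshly xmalloc'd storage; a local substitute for strdup. */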
10858 static char *
10859 bc_strdup (s)
10860 char *s;
10861 {
10862 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10863 strcpy (new, s);
10864 return new;
10865 }
10866
10867
10868 /* Like above, but expects an IDENTIFIER. */
10869 void
10870 bc_load_externaddr_id (id, offset)
10871 tree id;
10872 int offset;
10873 {
10874 if (!IDENTIFIER_POINTER (id))
10875 abort ();
10876
10877 bc_emit_bytecode (constP);
10878 bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);
10879
10880 #ifdef DEBUG_PRINT_CODE
10881 fputc ('\n', stderr);
10882 #endif
10883 }
10884
10885
10886 /* Push the machine address for the given local variable offset. */
10887 void
10888 bc_load_localaddr (localaddr)
10889 rtx localaddr;
10890 {
10891 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10892 }
10893
10894
10895 /* Push the machine address for the given parameter offset.
10896 NOTE: offset is in bits. */
10897 void
10898 bc_load_parmaddr (parmaddr)
10899 rtx parmaddr;
10900 {
10901 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10902 / BITS_PER_UNIT));
10903 }
10904
10905
10906 /* Convert a[i] into *(a + i). */
10907 tree
10908 bc_canonicalize_array_ref (exp)
10909 tree exp;
10910 {
10911 tree type = TREE_TYPE (exp);
10912 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10913 TREE_OPERAND (exp, 0));
10914 tree index = TREE_OPERAND (exp, 1);
10915
10916
10917 /* Convert the integer argument to a type the same size as a pointer
10918 so the multiply won't overflow spuriously. */
10919
10920 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10921 index = convert (type_for_size (POINTER_SIZE, 0), index);
10922
10923 /* The array address isn't volatile even if the array is.
10924 (Of course this isn't terribly relevant since the bytecode
10925 translator treats nearly everything as volatile anyway.) */
10926 TREE_THIS_VOLATILE (array_adr) = 0;
10927
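/* Build *(a + i * sizeof (*a)), folding the address arithmetic. */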
10928 return build1 (INDIRECT_REF, type,
10929 fold (build (PLUS_EXPR,
10930 TYPE_POINTER_TO (type),
10931 array_adr,
10932 fold (build (MULT_EXPR,
10933 TYPE_POINTER_TO (type),
10934 index,
10935 size_in_bytes (type))))));
10936 }
10937
10938
10939 /* Load the address of the component referenced by the given
10940 COMPONENT_REF expression.
10941
10942 Returns innermost lvalue. */
10943
10944 tree
10945 bc_expand_component_address (exp)
10946 tree exp;
10947 {
10948 tree tem, chain;
10949 enum machine_mode mode;
10950 int bitpos = 0;
10951 HOST_WIDE_INT SIval;
10952
10953
10954 tem = TREE_OPERAND (exp, 1);
10955 mode = DECL_MODE (tem);
10956
10957
10958 /* Compute cumulative bit offset for nested component refs
10959 and array refs, and find the ultimate containing object. */
10960
10961 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10962 {
10963 if (TREE_CODE (tem) == COMPONENT_REF)
10964 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10965 else
10966 if (TREE_CODE (tem) == ARRAY_REF
10967 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10968 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10969
10970 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10971 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10972 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10973 else
10974 break;
10975 }
10976
10977 bc_expand_expr (tem);
10978
10979
10980 /* For bitfields also push their offset and size */
10981 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10982 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))) /* * DECL_SIZE_UNIT */);
10983 else
10984 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
10985 bc_emit_instruction (addconstPSI, SIval);
10986
10987 return (TREE_OPERAND (exp, 1));
10988 }
10989
10990
10991 /* Emit code to push two SI constants */
10992 void
10993 bc_push_offset_and_size (offset, size)
10994 HOST_WIDE_INT offset, size;
10995 {
10996 bc_emit_instruction (constSI, offset);
10997 bc_emit_instruction (constSI, size);
10998 }
10999
11000
11001 /* Emit byte code to push the address of the given lvalue expression to
11002 the stack. If it's a bit field, we also push offset and size info.
11003
11004 Returns innermost component, which allows us to determine not only
11005 its type, but also whether it's a bitfield. */
11006
11007 tree
11008 bc_expand_address (exp)
11009 tree exp;
11010 {
11011 /* Safeguard */
11012 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11013 return (exp);
11014
11015
11016 switch (TREE_CODE (exp))
11017 {
11018 case ARRAY_REF:
11019
11020 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11021
11022 case COMPONENT_REF:
11023
11024 return (bc_expand_component_address (exp));
11025
11026 case INDIRECT_REF:
11027
11028 bc_expand_expr (TREE_OPERAND (exp, 0));
11029
11030 /* For variable-sized types: retrieve pointer. Sometimes the
11031 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11032 also make sure we have an operand, just in case... */
11033
11034 if (TREE_OPERAND (exp, 0)
11035 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11036 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11037 bc_emit_instruction (loadP);
11038
11039 /* If packed, also return offset and size */
11040 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11041
11042 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11043 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11044
11045 return (TREE_OPERAND (exp, 0));
11046
11047 case FUNCTION_DECL:
11048
11049 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11050 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11051 break;
11052
11053 case PARM_DECL:
11054
11055 bc_load_parmaddr (DECL_RTL (exp));
11056
11057 /* For variable-sized types: retrieve pointer */
11058 if (TYPE_SIZE (TREE_TYPE (exp))
11059 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11060 bc_emit_instruction (loadP);
11061
11062 /* If packed, also return offset and size */
11063 if (DECL_BIT_FIELD (exp))
11064 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11065 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11066
11067 break;
11068
11069 case RESULT_DECL:
11070
11071 bc_emit_instruction (returnP);
11072 break;
11073
11074 case VAR_DECL:
11075
11076 #if 0
11077 if (BYTECODE_LABEL (DECL_RTL (exp)))
11078 bc_load_externaddr (DECL_RTL (exp));
11079 #endif
11080
11081 if (DECL_EXTERNAL (exp))
11082 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11083 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11084 else
11085 bc_load_localaddr (DECL_RTL (exp));
11086
11087 /* For variable-sized types: retrieve pointer */
11088 if (TYPE_SIZE (TREE_TYPE (exp))
11089 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11090 bc_emit_instruction (loadP);
11091
11092 /* If packed, also return offset and size */
11093 if (DECL_BIT_FIELD (exp))
11094 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11095 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11096
11097 break;
11098
11099 case STRING_CST:
11100 {
11101 rtx r;
11102
11103 bc_emit_bytecode (constP);
11104 r = output_constant_def (exp);
11105 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11106
11107 #ifdef DEBUG_PRINT_CODE
11108 fputc ('\n', stderr);
11109 #endif
11110 }
11111 break;
11112
11113 default:
11114
11115 abort ();
11116 break;
11117 }
11118
11119 /* Most lvalues don't have components. */
11120 return (exp);
11121 }
11122
11123
11124 /* Emit a type code to be used by the runtime support in handling
11125 parameter passing. The type code consists of the machine mode
11126 plus the minimal alignment shifted left 8 bits. */
11127
11128 tree
11129 bc_runtime_type_code (type)
11130 tree type;
11131 {
11132 int val;
11133
11134 switch (TREE_CODE (type))
11135 {
11136 case VOID_TYPE:
11137 case INTEGER_TYPE:
11138 case REAL_TYPE:
11139 case COMPLEX_TYPE:
11140 case ENUMERAL_TYPE:
11141 case POINTER_TYPE:
11142 case RECORD_TYPE:
11143
11144 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11145 break;
11146
11147 case ERROR_MARK:
11148
11149 val = 0;
11150 break;
11151
11152 default:
11153
11154 abort ();
11155 }
11156 return build_int_2 (val, 0);
11157 }
11158
11159
11160 /* Generate constructor label */
11161 char *
11162 bc_gen_constr_label ()
11163 {
11164 static int label_counter;
11165 static char label[20];
11166
11167 sprintf (label, "*LR%d", label_counter++);
11168
11169 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11170 }
11171
11172
11173 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11174 expand the constructor data as static data, and push a pointer to it.
11175 The pointer is put in the pointer table and is retrieved by a constP
11176 bytecode instruction. We then loop and store each constructor member in
11177 the corresponding component. Finally, we return the original pointer on
11178 the stack. */
11179
11180 void
11181 bc_expand_constructor (constr)
11182 tree constr;
11183 {
11184 char *l;
11185 HOST_WIDE_INT ptroffs;
11186 rtx constr_rtx;
11187
11188
11189 /* Literal constructors are handled as constants, whereas
11190 non-literals are evaluated and stored element by element
11191 into the data segment. */
11192
11193 /* Allocate space in the proper segment and push a pointer to
11194 the space on the stack. */
11195
11196 l = bc_gen_constr_label ();
11197
11198 if (TREE_CONSTANT (constr))
11199 {
11200 text_section ();
11201
11202 bc_emit_const_labeldef (l);
11203 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11204 }
11205 else
11206 {
11207 data_section ();
11208
11209 bc_emit_data_labeldef (l);
11210 bc_output_data_constructor (constr);
11211 }
11212
11213
11214 /* Add reference to pointer table and recall pointer to stack;
11215 this code is common for both types of constructors: literals
11216 and non-literals. */
11217
11218 ptroffs = bc_define_pointer (l);
11219 bc_emit_instruction (constP, ptroffs);
11220
11221 /* This is all that has to be done if it's a literal. */
11222 if (TREE_CONSTANT (constr))
11223 return;
11224
11225
11226 /* At this point, we have the pointer to the structure on top of the stack.
11227 Generate sequences of store_memory calls for the constructor. */
11228
11229 /* constructor type is structure */
11230 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11231 {
11232 register tree elt;
11233
11234 /* If the constructor has fewer fields than the structure,
11235 clear the whole structure first. */
11236
11237 if (list_length (CONSTRUCTOR_ELTS (constr))
11238 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11239 {
11240 bc_emit_instruction (duplicate);
11241 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11242 bc_emit_instruction (clearBLK);
11243 }
11244
11245 /* Store each element of the constructor into the corresponding
11246 field of TARGET. */
11247
11248 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11249 {
11250 register tree field = TREE_PURPOSE (elt);
11251 register enum machine_mode mode;
11252 int bitsize;
11253 int bitpos;
11254 int unsignedp;
11255
11256 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11257 mode = DECL_MODE (field);
11258 unsignedp = TREE_UNSIGNED (field);
11259
11260 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11261
11262 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11263 /* The alignment of TARGET is
11264 at least what its type requires. */
11265 VOIDmode, 0,
11266 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11267 int_size_in_bytes (TREE_TYPE (constr)));
11268 }
11269 }
11270 else
11271
11272 /* Constructor type is array */
11273 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11274 {
11275 register tree elt;
11276 register int i;
11277 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11278 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11279 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11280 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11281
11282 /* If the constructor has fewer elements than the array,
11283 clear the whole array first. */
11284
11285 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11286 {
11287 bc_emit_instruction (duplicate);
11288 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11289 bc_emit_instruction (clearBLK);
11290 }
11291
11292
11293 /* Store each element of the constructor into the corresponding
11294 element of TARGET, determined by counting the elements. */
11295
11296 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11297 elt;
11298 elt = TREE_CHAIN (elt), i++)
11299 {
11300 register enum machine_mode mode;
11301 int bitsize;
11302 int bitpos;
11303 int unsignedp;
11304
11305 mode = TYPE_MODE (elttype);
11306 bitsize = GET_MODE_BITSIZE (mode);
11307 unsignedp = TREE_UNSIGNED (elttype);
11308
11309 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11310 /* * TYPE_SIZE_UNIT (elttype) */ );
11311
11312 bc_store_field (elt, bitsize, bitpos, mode,
11313 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11314 /* The alignment of TARGET is
11315 at least what its type requires. */
11316 VOIDmode, 0,
11317 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11318 int_size_in_bytes (TREE_TYPE (constr)));
11319 }
11320
11321 }
11322 }
11323
11324
11325 /* Store the value of EXP (an expression tree) into member FIELD of
11326 structure at address on stack, which has type TYPE, mode MODE and
11327 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11328 structure.
11329
11330 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11331 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11332
11333 void
11334 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11335 value_mode, unsignedp, align, total_size)
11336 int bitsize, bitpos;
11337 enum machine_mode mode;
11338 tree field, exp, type;
11339 enum machine_mode value_mode;
11340 int unsignedp;
11341 int align;
11342 int total_size;
11343 {
11344
11345 /* Expand expression and copy pointer */
11346 bc_expand_expr (exp);
11347 bc_emit_instruction (over);
11348
11349
11350 /* If the component is a bit field, we cannot use addressing to access
11351 it. Use bit-field techniques to store in it. */
11352
11353 if (DECL_BIT_FIELD (field))
11354 {
11355 bc_store_bit_field (bitpos, bitsize, unsignedp);
11356 return;
11357 }
11358 else
11359 /* Not bit field */
11360 {
11361 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11362
11363 /* Advance pointer to the desired member */
11364 if (offset)
11365 bc_emit_instruction (addconstPSI, offset);
11366
11367 /* Store */
11368 bc_store_memory (type, field);
11369 }
11370 }
11371
11372
11373 /* Store SI/SU in bitfield */
11374 void
11375 bc_store_bit_field (offset, size, unsignedp)
11376 int offset, size, unsignedp;
11377 {
11378 /* Push bitfield offset and size */
11379 bc_push_offset_and_size (offset, size);
11380
11381 /* Store */
11382 bc_emit_instruction (sstoreBI);
11383 }
11384
11385
11386 /* Load SI/SU from bitfield */
11387 void
11388 bc_load_bit_field (offset, size, unsignedp)
11389 int offset, size, unsignedp;
11390 {
11391 /* Push bitfield offset and size */
11392 bc_push_offset_and_size (offset, size);
11393
11394 /* Load: sign-extend if signed, else zero-extend */
11395 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11396 }
11397
11398
11399 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11400 (adjust stack pointer upwards), negative means add that number of
11401 levels (adjust the stack pointer downwards). Only positive values
11402 normally make sense. */
11403
11404 void
11405 bc_adjust_stack (nlevels)
11406 int nlevels;
11407 {
11408 switch (nlevels)
11409 {
11410 case 0:
11411 break;
11412
11413 case 2:
11414 bc_emit_instruction (drop);
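/* Fall through to drop the second level. */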
11415
11416 case 1:
11417 bc_emit_instruction (drop);
11418 break;
11419
11420 default:
11421
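/* The stack effect of adjstackSI depends on its operand, so
   (presumably unlike the fixed-effect drops above) the recorded
   depth must be adjusted by hand. */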
11422 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11423 stack_depth -= nlevels;
11424 }
11425
11426 #if defined (VALIDATE_STACK_FOR_BC)
11427 VALIDATE_STACK_FOR_BC ();
11428 #endif
11429 }