]> gcc.gnu.org Git - gcc.git/blob - gcc/expr.c
(probe_stack_range): Do probing with loop if more than a small number.
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "machmode.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "obstack.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
34 #include "expr.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "typeclass.h"
39
40 #include "bytecode.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
43 #include "bc-optab.h"
44 #include "bc-emit.h"
45
46
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
81
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
86
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
90
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
96
97 /* When temporaries are created by TARGET_EXPRs, they are created at
98 this level of temp_slot_level, so that they can remain allocated
99 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
100 of TARGET_EXPRs. */
101 int target_temp_slot_level;
102
103 /* Nonzero means __builtin_saveregs has already been done in this function.
104 The value is the pseudoreg containing the value __builtin_saveregs
105 returned. */
106 static rtx saveregs_value;
107
108 /* Similarly for __builtin_apply_args. */
109 static rtx apply_args_value;
110
111 /* This structure is used by move_by_pieces to describe the move to
112 be performed. */
113
114 struct move_by_pieces
115 {
116 rtx to;
117 rtx to_addr;
118 int autinc_to;
119 int explicit_inc_to;
120 int to_struct;
121 rtx from;
122 rtx from_addr;
123 int autinc_from;
124 int explicit_inc_from;
125 int from_struct;
126 int len;
127 int offset;
128 int reverse;
129 };
130
131 /* This structure is used by clear_by_pieces to describe the clear to
132 be performed. */
133
134 struct clear_by_pieces
135 {
136 rtx to;
137 rtx to_addr;
138 int autinc_to;
139 int explicit_inc_to;
140 int to_struct;
141 int len;
142 int offset;
143 int reverse;
144 };
145
146 /* Used to generate bytecodes: keep track of size of local variables,
147 as well as depth of arithmetic stack. (Notice that variables are
148 stored on the machine's stack, not the arithmetic stack.) */
149
150 extern int local_vars_size;
151 extern int stack_depth;
152 extern int max_stack_depth;
153 extern struct obstack permanent_obstack;
154 extern rtx arg_pointer_save_area;
155
156 static rtx enqueue_insn PROTO((rtx, rtx));
157 static int queued_subexp_p PROTO((rtx));
158 static void init_queue PROTO((void));
159 static void move_by_pieces PROTO((rtx, rtx, int, int));
160 static int move_by_pieces_ninsns PROTO((unsigned int, int));
161 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
162 struct move_by_pieces *));
163 static void clear_by_pieces PROTO((rtx, int, int));
164 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
165 struct clear_by_pieces *));
166 static int is_zeros_p PROTO((tree));
167 static int mostly_zeros_p PROTO((tree));
168 static void store_constructor PROTO((tree, rtx, int));
169 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
170 enum machine_mode, int, int, int));
171 static int get_inner_unaligned_p PROTO((tree));
172 static tree save_noncopied_parts PROTO((tree, tree));
173 static tree init_noncopied_parts PROTO((tree, tree));
174 static int safe_from_p PROTO((rtx, tree));
175 static int fixed_type_p PROTO((tree));
176 static rtx var_rtx PROTO((tree));
177 static int get_pointer_alignment PROTO((tree, unsigned));
178 static tree string_constant PROTO((tree, tree *));
179 static tree c_strlen PROTO((tree));
180 static rtx expand_builtin PROTO((tree, rtx, rtx,
181 enum machine_mode, int));
182 static int apply_args_size PROTO((void));
183 static int apply_result_size PROTO((void));
184 static rtx result_vector PROTO((int, rtx));
185 static rtx expand_builtin_apply_args PROTO((void));
186 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
187 static void expand_builtin_return PROTO((rtx));
188 static rtx expand_increment PROTO((tree, int, int));
189 void bc_expand_increment PROTO((struct increment_operator *, tree));
190 rtx bc_allocate_local PROTO((int, int));
191 void bc_store_memory PROTO((tree, tree));
192 tree bc_expand_component_address PROTO((tree));
193 tree bc_expand_address PROTO((tree));
194 void bc_expand_constructor PROTO((tree));
195 void bc_adjust_stack PROTO((int));
196 tree bc_canonicalize_array_ref PROTO((tree));
197 void bc_load_memory PROTO((tree, tree));
198 void bc_load_externaddr PROTO((rtx));
199 void bc_load_externaddr_id PROTO((tree, int));
200 void bc_load_localaddr PROTO((rtx));
201 void bc_load_parmaddr PROTO((rtx));
202 static void preexpand_calls PROTO((tree));
203 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
204 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
205 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
206 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
207 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
208 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
209 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
210 extern tree truthvalue_conversion PROTO((tree));
211
212 /* Record for each mode whether we can move a register directly to or
213 from an object of that mode in memory. If we can't, we won't try
214 to use that mode directly when accessing a field of that mode. */
215
216 static char direct_load[NUM_MACHINE_MODES];
217 static char direct_store[NUM_MACHINE_MODES];
218
219 /* MOVE_RATIO is the number of move instructions that is better than
220 a block move. */
221
222 #ifndef MOVE_RATIO
223 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
224 #define MOVE_RATIO 2
225 #else
226 /* A value of around 6 would minimize code size; infinity would minimize
227 execution time. */
228 #define MOVE_RATIO 15
229 #endif
230 #endif
231
232 /* This array records the insn_code of insns to perform block moves. */
233 enum insn_code movstr_optab[NUM_MACHINE_MODES];
234
235 /* This array records the insn_code of insns to perform block clears. */
236 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
237
238 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
239
240 #ifndef SLOW_UNALIGNED_ACCESS
241 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
242 #endif
243
244 /* Register mappings for target machines without register windows. */
245 #ifndef INCOMING_REGNO
246 #define INCOMING_REGNO(OUT) (OUT)
247 #endif
248 #ifndef OUTGOING_REGNO
249 #define OUTGOING_REGNO(IN) (IN)
250 #endif
251 \f
252 /* Maps used to convert modes to const, load, and store bytecodes. */
253 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
254 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
255 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
256
257 /* Initialize maps used to convert modes to const, load, and store
258 bytecodes. */
259
260 void
261 bc_init_mode_to_opcode_maps ()
262 {
263 int mode;
264
265 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
266 mode_to_const_map[mode]
267 = mode_to_load_map[mode]
268 = mode_to_store_map[mode] = neverneverland;
269
270 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
271 mode_to_const_map[(int) SYM] = CONST; \
272 mode_to_load_map[(int) SYM] = LOAD; \
273 mode_to_store_map[(int) SYM] = STORE;
274
275 #include "modemap.def"
276 #undef DEF_MODEMAP
277 }
278 \f
279 /* This is run once per compilation to set up which modes can be used
280 directly in memory and to initialize the block move optab. */
281
282 void
283 init_expr_once ()
284 {
285 rtx insn, pat;
286 enum machine_mode mode;
287 /* Try indexing by frame ptr and try by stack ptr.
288 It is known that on the Convex the stack ptr isn't a valid index.
289 With luck, one or the other is valid on any machine. */
290 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
291 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
292
293 start_sequence ();
294 insn = emit_insn (gen_rtx (SET, 0, 0));
295 pat = PATTERN (insn);
296
297 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
298 mode = (enum machine_mode) ((int) mode + 1))
299 {
300 int regno;
301 rtx reg;
302 int num_clobbers;
303
304 direct_load[(int) mode] = direct_store[(int) mode] = 0;
305 PUT_MODE (mem, mode);
306 PUT_MODE (mem1, mode);
307
308 /* See if there is some register that can be used in this mode and
309 directly loaded or stored from memory. */
310
311 if (mode != VOIDmode && mode != BLKmode)
312 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
313 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
314 regno++)
315 {
316 if (! HARD_REGNO_MODE_OK (regno, mode))
317 continue;
318
319 reg = gen_rtx (REG, mode, regno);
320
321 SET_SRC (pat) = mem;
322 SET_DEST (pat) = reg;
323 if (recog (pat, insn, &num_clobbers) >= 0)
324 direct_load[(int) mode] = 1;
325
326 SET_SRC (pat) = mem1;
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
330
331 SET_SRC (pat) = reg;
332 SET_DEST (pat) = mem;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_store[(int) mode] = 1;
335
336 SET_SRC (pat) = reg;
337 SET_DEST (pat) = mem1;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
340 }
341 }
342
343 end_sequence ();
344 }
345
346 /* This is run at the start of compiling a function. */
347
348 void
349 init_expr ()
350 {
351 init_queue ();
352
353 pending_stack_adjust = 0;
354 inhibit_defer_pop = 0;
355 saveregs_value = 0;
356 apply_args_value = 0;
357 forced_labels = 0;
358 }
359
360 /* Save all variables describing the current status into the structure *P.
361 This is used before starting a nested function. */
362
363 void
364 save_expr_status (p)
365 struct function *p;
366 {
367 /* Instead of saving the postincrement queue, empty it. */
368 emit_queue ();
369
370 p->pending_stack_adjust = pending_stack_adjust;
371 p->inhibit_defer_pop = inhibit_defer_pop;
372 p->saveregs_value = saveregs_value;
373 p->apply_args_value = apply_args_value;
374 p->forced_labels = forced_labels;
375
376 pending_stack_adjust = 0;
377 inhibit_defer_pop = 0;
378 saveregs_value = 0;
379 apply_args_value = 0;
380 forced_labels = 0;
381 }
382
383 /* Restore all variables describing the current status from the structure *P.
384 This is used after a nested function. */
385
386 void
387 restore_expr_status (p)
388 struct function *p;
389 {
390 pending_stack_adjust = p->pending_stack_adjust;
391 inhibit_defer_pop = p->inhibit_defer_pop;
392 saveregs_value = p->saveregs_value;
393 apply_args_value = p->apply_args_value;
394 forced_labels = p->forced_labels;
395 }
396 \f
397 /* Manage the queue of increment instructions to be output
398 for POSTINCREMENT_EXPR expressions, etc. */
399
400 static rtx pending_chain;
401
402 /* Queue up to increment (or change) VAR later. BODY says how:
403 BODY should be the same thing you would pass to emit_insn
404 to increment right away. It will go to emit_insn later on.
405
406 The value is a QUEUED expression to be used in place of VAR
407 where you want to guarantee the pre-incrementation value of VAR. */
408
409 static rtx
410 enqueue_insn (var, body)
411 rtx var, body;
412 {
413 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
414 var, NULL_RTX, NULL_RTX, body, pending_chain);
415 return pending_chain;
416 }
417
418 /* Use protect_from_queue to convert a QUEUED expression
419 into something that you can put immediately into an instruction.
420 If the queued incrementation has not happened yet,
421 protect_from_queue returns the variable itself.
422 If the incrementation has happened, protect_from_queue returns a temp
423 that contains a copy of the old value of the variable.
424
425 Any time an rtx which might possibly be a QUEUED is to be put
426 into an instruction, it must be passed through protect_from_queue first.
427 QUEUED expressions are not meaningful in instructions.
428
429 Do not pass a value through protect_from_queue and then hold
430 on to it for a while before putting it in an instruction!
431 If the queue is flushed in between, incorrect code will result. */
432
433 rtx
434 protect_from_queue (x, modify)
435 register rtx x;
436 int modify;
437 {
438 register RTX_CODE code = GET_CODE (x);
439
440 #if 0 /* A QUEUED can hang around after the queue is forced out. */
441 /* Shortcut for most common case. */
442 if (pending_chain == 0)
443 return x;
444 #endif
445
446 if (code != QUEUED)
447 {
448 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
449 use of autoincrement. Make a copy of the contents of the memory
450 location rather than a copy of the address, but not if the value is
451 of mode BLKmode. Don't modify X in place since it might be
452 shared. */
453 if (code == MEM && GET_MODE (x) != BLKmode
454 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
455 {
456 register rtx y = XEXP (x, 0);
457 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
458
459 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
460 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
461 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
462
463 if (QUEUED_INSN (y))
464 {
465 register rtx temp = gen_reg_rtx (GET_MODE (new));
466 emit_insn_before (gen_move_insn (temp, new),
467 QUEUED_INSN (y));
468 return temp;
469 }
470 return new;
471 }
472 /* Otherwise, recursively protect the subexpressions of all
473 the kinds of rtx's that can contain a QUEUED. */
474 if (code == MEM)
475 {
476 rtx tem = protect_from_queue (XEXP (x, 0), 0);
477 if (tem != XEXP (x, 0))
478 {
479 x = copy_rtx (x);
480 XEXP (x, 0) = tem;
481 }
482 }
483 else if (code == PLUS || code == MULT)
484 {
485 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
486 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
487 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
488 {
489 x = copy_rtx (x);
490 XEXP (x, 0) = new0;
491 XEXP (x, 1) = new1;
492 }
493 }
494 return x;
495 }
496 /* If the increment has not happened, use the variable itself. */
497 if (QUEUED_INSN (x) == 0)
498 return QUEUED_VAR (x);
499 /* If the increment has happened and a pre-increment copy exists,
500 use that copy. */
501 if (QUEUED_COPY (x) != 0)
502 return QUEUED_COPY (x);
503 /* The increment has happened but we haven't set up a pre-increment copy.
504 Set one up now, and use it. */
505 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
506 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
507 QUEUED_INSN (x));
508 return QUEUED_COPY (x);
509 }
510
511 /* Return nonzero if X contains a QUEUED expression:
512 if it contains anything that will be altered by a queued increment.
513 We handle only combinations of MEM, PLUS, MINUS and MULT operators
514 since memory addresses generally contain only those. */
515
516 static int
517 queued_subexp_p (x)
518 rtx x;
519 {
520 register enum rtx_code code = GET_CODE (x);
521 switch (code)
522 {
523 case QUEUED:
524 return 1;
525 case MEM:
526 return queued_subexp_p (XEXP (x, 0));
527 case MULT:
528 case PLUS:
529 case MINUS:
530 return queued_subexp_p (XEXP (x, 0))
531 || queued_subexp_p (XEXP (x, 1));
532 }
533 return 0;
534 }
535
536 /* Perform all the pending incrementations. */
537
538 void
539 emit_queue ()
540 {
541 register rtx p;
542 while (p = pending_chain)
543 {
544 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
545 pending_chain = QUEUED_NEXT (p);
546 }
547 }
548
549 static void
550 init_queue ()
551 {
552 if (pending_chain)
553 abort ();
554 }
555 \f
556 /* Copy data from FROM to TO, where the machine modes are not the same.
557 Both modes may be integer, or both may be floating.
558 UNSIGNEDP should be nonzero if FROM is an unsigned type.
559 This causes zero-extension instead of sign-extension. */
560
561 void
562 convert_move (to, from, unsignedp)
563 register rtx to, from;
564 int unsignedp;
565 {
566 enum machine_mode to_mode = GET_MODE (to);
567 enum machine_mode from_mode = GET_MODE (from);
568 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
569 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
570 enum insn_code code;
571 rtx libcall;
572
573 /* rtx code for making an equivalent value. */
574 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
575
576 to = protect_from_queue (to, 1);
577 from = protect_from_queue (from, 0);
578
579 if (to_real != from_real)
580 abort ();
581
582 /* If FROM is a SUBREG that indicates that we have already done at least
583 the required extension, strip it. We don't handle such SUBREGs as
584 TO here. */
585
586 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
587 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
588 >= GET_MODE_SIZE (to_mode))
589 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
590 from = gen_lowpart (to_mode, from), from_mode = to_mode;
591
592 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
593 abort ();
594
595 if (to_mode == from_mode
596 || (from_mode == VOIDmode && CONSTANT_P (from)))
597 {
598 emit_move_insn (to, from);
599 return;
600 }
601
602 if (to_real)
603 {
604 rtx value;
605
606 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
607 {
608 /* Try converting directly if the insn is supported. */
609 if ((code = can_extend_p (to_mode, from_mode, 0))
610 != CODE_FOR_nothing)
611 {
612 emit_unop_insn (code, to, from, UNKNOWN);
613 return;
614 }
615 }
616
617 #ifdef HAVE_trunchfqf2
618 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
619 {
620 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
621 return;
622 }
623 #endif
624 #ifdef HAVE_truncsfqf2
625 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
626 {
627 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
628 return;
629 }
630 #endif
631 #ifdef HAVE_truncdfqf2
632 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
633 {
634 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
635 return;
636 }
637 #endif
638 #ifdef HAVE_truncxfqf2
639 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
640 {
641 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
642 return;
643 }
644 #endif
645 #ifdef HAVE_trunctfqf2
646 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
647 {
648 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
649 return;
650 }
651 #endif
652
653 #ifdef HAVE_trunctqfhf2
654 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
655 {
656 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
657 return;
658 }
659 #endif
660 #ifdef HAVE_truncsfhf2
661 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
662 {
663 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
664 return;
665 }
666 #endif
667 #ifdef HAVE_truncdfhf2
668 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
669 {
670 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
671 return;
672 }
673 #endif
674 #ifdef HAVE_truncxfhf2
675 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
676 {
677 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
678 return;
679 }
680 #endif
681 #ifdef HAVE_trunctfhf2
682 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
683 {
684 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
685 return;
686 }
687 #endif
688
689 #ifdef HAVE_truncsftqf2
690 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
691 {
692 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
693 return;
694 }
695 #endif
696 #ifdef HAVE_truncdftqf2
697 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
698 {
699 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
700 return;
701 }
702 #endif
703 #ifdef HAVE_truncxftqf2
704 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
705 {
706 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
707 return;
708 }
709 #endif
710 #ifdef HAVE_trunctftqf2
711 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
712 {
713 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
714 return;
715 }
716 #endif
717
718 #ifdef HAVE_truncdfsf2
719 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
720 {
721 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
722 return;
723 }
724 #endif
725 #ifdef HAVE_truncxfsf2
726 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
727 {
728 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
729 return;
730 }
731 #endif
732 #ifdef HAVE_trunctfsf2
733 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
734 {
735 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
736 return;
737 }
738 #endif
739 #ifdef HAVE_truncxfdf2
740 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
741 {
742 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
743 return;
744 }
745 #endif
746 #ifdef HAVE_trunctfdf2
747 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
748 {
749 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
750 return;
751 }
752 #endif
753
754 libcall = (rtx) 0;
755 switch (from_mode)
756 {
757 case SFmode:
758 switch (to_mode)
759 {
760 case DFmode:
761 libcall = extendsfdf2_libfunc;
762 break;
763
764 case XFmode:
765 libcall = extendsfxf2_libfunc;
766 break;
767
768 case TFmode:
769 libcall = extendsftf2_libfunc;
770 break;
771 }
772 break;
773
774 case DFmode:
775 switch (to_mode)
776 {
777 case SFmode:
778 libcall = truncdfsf2_libfunc;
779 break;
780
781 case XFmode:
782 libcall = extenddfxf2_libfunc;
783 break;
784
785 case TFmode:
786 libcall = extenddftf2_libfunc;
787 break;
788 }
789 break;
790
791 case XFmode:
792 switch (to_mode)
793 {
794 case SFmode:
795 libcall = truncxfsf2_libfunc;
796 break;
797
798 case DFmode:
799 libcall = truncxfdf2_libfunc;
800 break;
801 }
802 break;
803
804 case TFmode:
805 switch (to_mode)
806 {
807 case SFmode:
808 libcall = trunctfsf2_libfunc;
809 break;
810
811 case DFmode:
812 libcall = trunctfdf2_libfunc;
813 break;
814 }
815 break;
816 }
817
818 if (libcall == (rtx) 0)
819 /* This conversion is not implemented yet. */
820 abort ();
821
822 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
823 1, from, from_mode);
824 emit_move_insn (to, value);
825 return;
826 }
827
828 /* Now both modes are integers. */
829
830 /* Handle expanding beyond a word. */
831 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
832 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
833 {
834 rtx insns;
835 rtx lowpart;
836 rtx fill_value;
837 rtx lowfrom;
838 int i;
839 enum machine_mode lowpart_mode;
840 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
841
842 /* Try converting directly if the insn is supported. */
843 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
844 != CODE_FOR_nothing)
845 {
846 /* If FROM is a SUBREG, put it into a register. Do this
847 so that we always generate the same set of insns for
848 better cse'ing; if an intermediate assignment occurred,
849 we won't be doing the operation directly on the SUBREG. */
850 if (optimize > 0 && GET_CODE (from) == SUBREG)
851 from = force_reg (from_mode, from);
852 emit_unop_insn (code, to, from, equiv_code);
853 return;
854 }
855 /* Next, try converting via full word. */
856 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
857 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
858 != CODE_FOR_nothing))
859 {
860 if (GET_CODE (to) == REG)
861 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
862 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
863 emit_unop_insn (code, to,
864 gen_lowpart (word_mode, to), equiv_code);
865 return;
866 }
867
868 /* No special multiword conversion insn; do it by hand. */
869 start_sequence ();
870
871 /* Since we will turn this into a no conflict block, we must ensure
872 that the source does not overlap the target. */
873
874 if (reg_overlap_mentioned_p (to, from))
875 from = force_reg (from_mode, from);
876
877 /* Get a copy of FROM widened to a word, if necessary. */
878 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
879 lowpart_mode = word_mode;
880 else
881 lowpart_mode = from_mode;
882
883 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
884
885 lowpart = gen_lowpart (lowpart_mode, to);
886 emit_move_insn (lowpart, lowfrom);
887
888 /* Compute the value to put in each remaining word. */
889 if (unsignedp)
890 fill_value = const0_rtx;
891 else
892 {
893 #ifdef HAVE_slt
894 if (HAVE_slt
895 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
896 && STORE_FLAG_VALUE == -1)
897 {
898 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
899 lowpart_mode, 0, 0);
900 fill_value = gen_reg_rtx (word_mode);
901 emit_insn (gen_slt (fill_value));
902 }
903 else
904 #endif
905 {
906 fill_value
907 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
908 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
909 NULL_RTX, 0);
910 fill_value = convert_to_mode (word_mode, fill_value, 1);
911 }
912 }
913
914 /* Fill the remaining words. */
915 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
916 {
917 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
918 rtx subword = operand_subword (to, index, 1, to_mode);
919
920 if (subword == 0)
921 abort ();
922
923 if (fill_value != subword)
924 emit_move_insn (subword, fill_value);
925 }
926
927 insns = get_insns ();
928 end_sequence ();
929
930 emit_no_conflict_block (insns, to, from, NULL_RTX,
931 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
932 return;
933 }
934
935 /* Truncating multi-word to a word or less. */
936 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
937 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
938 {
939 if (!((GET_CODE (from) == MEM
940 && ! MEM_VOLATILE_P (from)
941 && direct_load[(int) to_mode]
942 && ! mode_dependent_address_p (XEXP (from, 0)))
943 || GET_CODE (from) == REG
944 || GET_CODE (from) == SUBREG))
945 from = force_reg (from_mode, from);
946 convert_move (to, gen_lowpart (word_mode, from), 0);
947 return;
948 }
949
950 /* Handle pointer conversion */ /* SPEE 900220 */
951 if (to_mode == PSImode)
952 {
953 if (from_mode != SImode)
954 from = convert_to_mode (SImode, from, unsignedp);
955
956 #ifdef HAVE_truncsipsi2
957 if (HAVE_truncsipsi2)
958 {
959 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
960 return;
961 }
962 #endif /* HAVE_truncsipsi2 */
963 abort ();
964 }
965
966 if (from_mode == PSImode)
967 {
968 if (to_mode != SImode)
969 {
970 from = convert_to_mode (SImode, from, unsignedp);
971 from_mode = SImode;
972 }
973 else
974 {
975 #ifdef HAVE_extendpsisi2
976 if (HAVE_extendpsisi2)
977 {
978 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
979 return;
980 }
981 #endif /* HAVE_extendpsisi2 */
982 abort ();
983 }
984 }
985
986 if (to_mode == PDImode)
987 {
988 if (from_mode != DImode)
989 from = convert_to_mode (DImode, from, unsignedp);
990
991 #ifdef HAVE_truncdipdi2
992 if (HAVE_truncdipdi2)
993 {
994 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
995 return;
996 }
997 #endif /* HAVE_truncdipdi2 */
998 abort ();
999 }
1000
1001 if (from_mode == PDImode)
1002 {
1003 if (to_mode != DImode)
1004 {
1005 from = convert_to_mode (DImode, from, unsignedp);
1006 from_mode = DImode;
1007 }
1008 else
1009 {
1010 #ifdef HAVE_extendpdidi2
1011 if (HAVE_extendpdidi2)
1012 {
1013 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1014 return;
1015 }
1016 #endif /* HAVE_extendpdidi2 */
1017 abort ();
1018 }
1019 }
1020
1021 /* Now follow all the conversions between integers
1022 no more than a word long. */
1023
1024 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1025 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1026 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1027 GET_MODE_BITSIZE (from_mode)))
1028 {
1029 if (!((GET_CODE (from) == MEM
1030 && ! MEM_VOLATILE_P (from)
1031 && direct_load[(int) to_mode]
1032 && ! mode_dependent_address_p (XEXP (from, 0)))
1033 || GET_CODE (from) == REG
1034 || GET_CODE (from) == SUBREG))
1035 from = force_reg (from_mode, from);
1036 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1037 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1038 from = copy_to_reg (from);
1039 emit_move_insn (to, gen_lowpart (to_mode, from));
1040 return;
1041 }
1042
1043 /* Handle extension. */
1044 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1045 {
1046 /* Convert directly if that works. */
1047 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1048 != CODE_FOR_nothing)
1049 {
1050 emit_unop_insn (code, to, from, equiv_code);
1051 return;
1052 }
1053 else
1054 {
1055 enum machine_mode intermediate;
1056
1057 /* Search for a mode to convert via. */
1058 for (intermediate = from_mode; intermediate != VOIDmode;
1059 intermediate = GET_MODE_WIDER_MODE (intermediate))
1060 if (((can_extend_p (to_mode, intermediate, unsignedp)
1061 != CODE_FOR_nothing)
1062 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1063 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1064 && (can_extend_p (intermediate, from_mode, unsignedp)
1065 != CODE_FOR_nothing))
1066 {
1067 convert_move (to, convert_to_mode (intermediate, from,
1068 unsignedp), unsignedp);
1069 return;
1070 }
1071
1072 /* No suitable intermediate mode. */
1073 abort ();
1074 }
1075 }
1076
1077 /* Support special truncate insns for certain modes. */
1078
1079 if (from_mode == DImode && to_mode == SImode)
1080 {
1081 #ifdef HAVE_truncdisi2
1082 if (HAVE_truncdisi2)
1083 {
1084 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1085 return;
1086 }
1087 #endif
1088 convert_move (to, force_reg (from_mode, from), unsignedp);
1089 return;
1090 }
1091
1092 if (from_mode == DImode && to_mode == HImode)
1093 {
1094 #ifdef HAVE_truncdihi2
1095 if (HAVE_truncdihi2)
1096 {
1097 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1098 return;
1099 }
1100 #endif
1101 convert_move (to, force_reg (from_mode, from), unsignedp);
1102 return;
1103 }
1104
1105 if (from_mode == DImode && to_mode == QImode)
1106 {
1107 #ifdef HAVE_truncdiqi2
1108 if (HAVE_truncdiqi2)
1109 {
1110 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1111 return;
1112 }
1113 #endif
1114 convert_move (to, force_reg (from_mode, from), unsignedp);
1115 return;
1116 }
1117
1118 if (from_mode == SImode && to_mode == HImode)
1119 {
1120 #ifdef HAVE_truncsihi2
1121 if (HAVE_truncsihi2)
1122 {
1123 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1124 return;
1125 }
1126 #endif
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 return;
1129 }
1130
1131 if (from_mode == SImode && to_mode == QImode)
1132 {
1133 #ifdef HAVE_truncsiqi2
1134 if (HAVE_truncsiqi2)
1135 {
1136 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1137 return;
1138 }
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1142 }
1143
1144 if (from_mode == HImode && to_mode == QImode)
1145 {
1146 #ifdef HAVE_trunchiqi2
1147 if (HAVE_trunchiqi2)
1148 {
1149 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1150 return;
1151 }
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1155 }
1156
1157 if (from_mode == TImode && to_mode == DImode)
1158 {
1159 #ifdef HAVE_trunctidi2
1160 if (HAVE_trunctidi2)
1161 {
1162 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1163 return;
1164 }
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1168 }
1169
1170 if (from_mode == TImode && to_mode == SImode)
1171 {
1172 #ifdef HAVE_trunctisi2
1173 if (HAVE_trunctisi2)
1174 {
1175 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1176 return;
1177 }
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1181 }
1182
1183 if (from_mode == TImode && to_mode == HImode)
1184 {
1185 #ifdef HAVE_trunctihi2
1186 if (HAVE_trunctihi2)
1187 {
1188 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1189 return;
1190 }
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1194 }
1195
1196 if (from_mode == TImode && to_mode == QImode)
1197 {
1198 #ifdef HAVE_trunctiqi2
1199 if (HAVE_trunctiqi2)
1200 {
1201 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1202 return;
1203 }
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1207 }
1208
1209 /* Handle truncation of volatile memrefs, and so on;
1210 the things that couldn't be truncated directly,
1211 and for which there was no special instruction. */
1212 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1213 {
1214 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1215 emit_move_insn (to, temp);
1216 return;
1217 }
1218
1219 /* Mode combination is not recognized. */
1220 abort ();
1221 }
1222
1223 /* Return an rtx for a value that would result
1224 from converting X to mode MODE.
1225 Both X and MODE may be floating, or both integer.
1226 UNSIGNEDP is nonzero if X is an unsigned value.
1227 This can be done by referring to a part of X in place
1228 or by copying to a new temporary with conversion.
1229
1230 This function *must not* call protect_from_queue
1231 except when putting X into an insn (in which case convert_move does it). */
1232
1233 rtx
1234 convert_to_mode (mode, x, unsignedp)
1235 enum machine_mode mode;
1236 rtx x;
1237 int unsignedp;
1238 {
1239 return convert_modes (mode, VOIDmode, x, unsignedp);
1240 }
1241
1242 /* Return an rtx for a value that would result
1243 from converting X from mode OLDMODE to mode MODE.
1244 Both modes may be floating, or both integer.
1245 UNSIGNEDP is nonzero if X is an unsigned value.
1246
1247 This can be done by referring to a part of X in place
1248 or by copying to a new temporary with conversion.
1249
1250 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1251
1252 This function *must not* call protect_from_queue
1253 except when putting X into an insn (in which case convert_move does it). */
1254
1255 rtx
1256 convert_modes (mode, oldmode, x, unsignedp)
1257 enum machine_mode mode, oldmode;
1258 rtx x;
1259 int unsignedp;
1260 {
1261 register rtx temp;
1262
1263 /* If FROM is a SUBREG that indicates that we have already done at least
1264 the required extension, strip it. */
1265
1266 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1267 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1268 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1269 x = gen_lowpart (mode, x);
1270
1271 if (GET_MODE (x) != VOIDmode)
1272 oldmode = GET_MODE (x);
1273
1274 if (mode == oldmode)
1275 return x;
1276
1277 /* There is one case that we must handle specially: If we are converting
1278 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1279 we are to interpret the constant as unsigned, gen_lowpart will do
1280 the wrong if the constant appears negative. What we want to do is
1281 make the high-order word of the constant zero, not all ones. */
1282
1283 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1284 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1285 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1286 {
1287 HOST_WIDE_INT val = INTVAL (x);
1288
1289 if (oldmode != VOIDmode
1290 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1291 {
1292 int width = GET_MODE_BITSIZE (oldmode);
1293
1294 /* We need to zero extend VAL. */
1295 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1296 }
1297
1298 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1299 }
1300
1301 /* We can do this with a gen_lowpart if both desired and current modes
1302 are integer, and this is either a constant integer, a register, or a
1303 non-volatile MEM. Except for the constant case where MODE is no
1304 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1305
1306 if ((GET_CODE (x) == CONST_INT
1307 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1308 || (GET_MODE_CLASS (mode) == MODE_INT
1309 && GET_MODE_CLASS (oldmode) == MODE_INT
1310 && (GET_CODE (x) == CONST_DOUBLE
1311 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1312 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1313 && direct_load[(int) mode])
1314 || (GET_CODE (x) == REG
1315 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1316 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1317 {
1318 /* ?? If we don't know OLDMODE, we have to assume here that
1319 X does not need sign- or zero-extension. This may not be
1320 the case, but it's the best we can do. */
1321 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1322 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1323 {
1324 HOST_WIDE_INT val = INTVAL (x);
1325 int width = GET_MODE_BITSIZE (oldmode);
1326
1327 /* We must sign or zero-extend in this case. Start by
1328 zero-extending, then sign extend if we need to. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 if (! unsignedp
1331 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1332 val |= (HOST_WIDE_INT) (-1) << width;
1333
1334 return GEN_INT (val);
1335 }
1336
1337 return gen_lowpart (mode, x);
1338 }
1339
1340 temp = gen_reg_rtx (mode);
1341 convert_move (temp, x, unsignedp);
1342 return temp;
1343 }
1344 \f
1345 /* Generate several move instructions to copy LEN bytes
1346 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1347 The caller must pass FROM and TO
1348 through protect_from_queue before calling.
1349 ALIGN (in bytes) is maximum alignment we can assume. */
1350
1351 static void
1352 move_by_pieces (to, from, len, align)
1353 rtx to, from;
1354 int len, align;
1355 {
1356 struct move_by_pieces data;
1357 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1358 int max_size = MOVE_MAX + 1;
1359
1360 data.offset = 0;
1361 data.to_addr = to_addr;
1362 data.from_addr = from_addr;
1363 data.to = to;
1364 data.from = from;
1365 data.autinc_to
1366 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1367 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1368 data.autinc_from
1369 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1370 || GET_CODE (from_addr) == POST_INC
1371 || GET_CODE (from_addr) == POST_DEC);
1372
1373 data.explicit_inc_from = 0;
1374 data.explicit_inc_to = 0;
1375 data.reverse
1376 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1377 if (data.reverse) data.offset = len;
1378 data.len = len;
1379
1380 data.to_struct = MEM_IN_STRUCT_P (to);
1381 data.from_struct = MEM_IN_STRUCT_P (from);
1382
1383 /* If copying requires more than two move insns,
1384 copy addresses to registers (to make displacements shorter)
1385 and use post-increment if available. */
1386 if (!(data.autinc_from && data.autinc_to)
1387 && move_by_pieces_ninsns (len, align) > 2)
1388 {
1389 #ifdef HAVE_PRE_DECREMENT
1390 if (data.reverse && ! data.autinc_from)
1391 {
1392 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1393 data.autinc_from = 1;
1394 data.explicit_inc_from = -1;
1395 }
1396 #endif
1397 #ifdef HAVE_POST_INCREMENT
1398 if (! data.autinc_from)
1399 {
1400 data.from_addr = copy_addr_to_reg (from_addr);
1401 data.autinc_from = 1;
1402 data.explicit_inc_from = 1;
1403 }
1404 #endif
1405 if (!data.autinc_from && CONSTANT_P (from_addr))
1406 data.from_addr = copy_addr_to_reg (from_addr);
1407 #ifdef HAVE_PRE_DECREMENT
1408 if (data.reverse && ! data.autinc_to)
1409 {
1410 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1411 data.autinc_to = 1;
1412 data.explicit_inc_to = -1;
1413 }
1414 #endif
1415 #ifdef HAVE_POST_INCREMENT
1416 if (! data.reverse && ! data.autinc_to)
1417 {
1418 data.to_addr = copy_addr_to_reg (to_addr);
1419 data.autinc_to = 1;
1420 data.explicit_inc_to = 1;
1421 }
1422 #endif
1423 if (!data.autinc_to && CONSTANT_P (to_addr))
1424 data.to_addr = copy_addr_to_reg (to_addr);
1425 }
1426
1427 if (! SLOW_UNALIGNED_ACCESS
1428 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1429 align = MOVE_MAX;
1430
1431 /* First move what we can in the largest integer mode, then go to
1432 successively smaller modes. */
1433
1434 while (max_size > 1)
1435 {
1436 enum machine_mode mode = VOIDmode, tmode;
1437 enum insn_code icode;
1438
1439 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1440 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1441 if (GET_MODE_SIZE (tmode) < max_size)
1442 mode = tmode;
1443
1444 if (mode == VOIDmode)
1445 break;
1446
1447 icode = mov_optab->handlers[(int) mode].insn_code;
1448 if (icode != CODE_FOR_nothing
1449 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1450 GET_MODE_SIZE (mode)))
1451 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1452
1453 max_size = GET_MODE_SIZE (mode);
1454 }
1455
1456 /* The code above should have handled everything. */
1457 if (data.len > 0)
1458 abort ();
1459 }
1460
1461 /* Return number of insns required to move L bytes by pieces.
1462 ALIGN (in bytes) is maximum alignment we can assume. */
1463
1464 static int
1465 move_by_pieces_ninsns (l, align)
1466 unsigned int l;
1467 int align;
1468 {
1469 register int n_insns = 0;
1470 int max_size = MOVE_MAX + 1;
1471
1472 if (! SLOW_UNALIGNED_ACCESS
1473 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1474 align = MOVE_MAX;
1475
1476 while (max_size > 1)
1477 {
1478 enum machine_mode mode = VOIDmode, tmode;
1479 enum insn_code icode;
1480
1481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1483 if (GET_MODE_SIZE (tmode) < max_size)
1484 mode = tmode;
1485
1486 if (mode == VOIDmode)
1487 break;
1488
1489 icode = mov_optab->handlers[(int) mode].insn_code;
1490 if (icode != CODE_FOR_nothing
1491 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1492 GET_MODE_SIZE (mode)))
1493 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1494
1495 max_size = GET_MODE_SIZE (mode);
1496 }
1497
1498 return n_insns;
1499 }
1500
1501 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1502 with move instructions for mode MODE. GENFUN is the gen_... function
1503 to make a move insn for that mode. DATA has all the other info. */
1504
1505 static void
1506 move_by_pieces_1 (genfun, mode, data)
1507 rtx (*genfun) ();
1508 enum machine_mode mode;
1509 struct move_by_pieces *data;
1510 {
1511 register int size = GET_MODE_SIZE (mode);
1512 register rtx to1, from1;
1513
1514 while (data->len >= size)
1515 {
1516 if (data->reverse) data->offset -= size;
1517
1518 to1 = (data->autinc_to
1519 ? gen_rtx (MEM, mode, data->to_addr)
1520 : copy_rtx (change_address (data->to, mode,
1521 plus_constant (data->to_addr,
1522 data->offset))));
1523 MEM_IN_STRUCT_P (to1) = data->to_struct;
1524
1525 from1
1526 = (data->autinc_from
1527 ? gen_rtx (MEM, mode, data->from_addr)
1528 : copy_rtx (change_address (data->from, mode,
1529 plus_constant (data->from_addr,
1530 data->offset))));
1531 MEM_IN_STRUCT_P (from1) = data->from_struct;
1532
1533 #ifdef HAVE_PRE_DECREMENT
1534 if (data->explicit_inc_to < 0)
1535 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1536 if (data->explicit_inc_from < 0)
1537 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1538 #endif
1539
1540 emit_insn ((*genfun) (to1, from1));
1541 #ifdef HAVE_POST_INCREMENT
1542 if (data->explicit_inc_to > 0)
1543 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1544 if (data->explicit_inc_from > 0)
1545 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1546 #endif
1547
1548 if (! data->reverse) data->offset += size;
1549
1550 data->len -= size;
1551 }
1552 }
1553 \f
1554 /* Emit code to move a block Y to a block X.
1555 This may be done with string-move instructions,
1556 with multiple scalar move instructions, or with a library call.
1557
1558 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1559 with mode BLKmode.
1560 SIZE is an rtx that says how long they are.
1561 ALIGN is the maximum alignment we can assume they have,
1562 measured in bytes. */
1563
1564 void
1565 emit_block_move (x, y, size, align)
1566 rtx x, y;
1567 rtx size;
1568 int align;
1569 {
1570 if (GET_MODE (x) != BLKmode)
1571 abort ();
1572
1573 if (GET_MODE (y) != BLKmode)
1574 abort ();
1575
1576 x = protect_from_queue (x, 1);
1577 y = protect_from_queue (y, 0);
1578 size = protect_from_queue (size, 0);
1579
1580 if (GET_CODE (x) != MEM)
1581 abort ();
1582 if (GET_CODE (y) != MEM)
1583 abort ();
1584 if (size == 0)
1585 abort ();
1586
1587 if (GET_CODE (size) == CONST_INT
1588 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1589 move_by_pieces (x, y, INTVAL (size), align);
1590 else
1591 {
1592 /* Try the most limited insn first, because there's no point
1593 including more than one in the machine description unless
1594 the more limited one has some advantage. */
1595
1596 rtx opalign = GEN_INT (align);
1597 enum machine_mode mode;
1598
1599 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1600 mode = GET_MODE_WIDER_MODE (mode))
1601 {
1602 enum insn_code code = movstr_optab[(int) mode];
1603
1604 if (code != CODE_FOR_nothing
1605 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1606 here because if SIZE is less than the mode mask, as it is
1607 returned by the macro, it will definitely be less than the
1608 actual mode mask. */
1609 && ((GET_CODE (size) == CONST_INT
1610 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1611 <= GET_MODE_MASK (mode)))
1612 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1613 && (insn_operand_predicate[(int) code][0] == 0
1614 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1615 && (insn_operand_predicate[(int) code][1] == 0
1616 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1617 && (insn_operand_predicate[(int) code][3] == 0
1618 || (*insn_operand_predicate[(int) code][3]) (opalign,
1619 VOIDmode)))
1620 {
1621 rtx op2;
1622 rtx last = get_last_insn ();
1623 rtx pat;
1624
1625 op2 = convert_to_mode (mode, size, 1);
1626 if (insn_operand_predicate[(int) code][2] != 0
1627 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1628 op2 = copy_to_mode_reg (mode, op2);
1629
1630 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1631 if (pat)
1632 {
1633 emit_insn (pat);
1634 return;
1635 }
1636 else
1637 delete_insns_since (last);
1638 }
1639 }
1640
1641 #ifdef TARGET_MEM_FUNCTIONS
1642 emit_library_call (memcpy_libfunc, 0,
1643 VOIDmode, 3, XEXP (x, 0), Pmode,
1644 XEXP (y, 0), Pmode,
1645 convert_to_mode (TYPE_MODE (sizetype), size,
1646 TREE_UNSIGNED (sizetype)),
1647 TYPE_MODE (sizetype));
1648 #else
1649 emit_library_call (bcopy_libfunc, 0,
1650 VOIDmode, 3, XEXP (y, 0), Pmode,
1651 XEXP (x, 0), Pmode,
1652 convert_to_mode (TYPE_MODE (integer_type_node), size,
1653 TREE_UNSIGNED (integer_type_node)),
1654 TYPE_MODE (integer_type_node));
1655 #endif
1656 }
1657 }
1658 \f
1659 /* Copy all or part of a value X into registers starting at REGNO.
1660 The number of registers to be filled is NREGS. */
1661
1662 void
1663 move_block_to_reg (regno, x, nregs, mode)
1664 int regno;
1665 rtx x;
1666 int nregs;
1667 enum machine_mode mode;
1668 {
1669 int i;
1670 rtx pat, last;
1671
1672 if (nregs == 0)
1673 return;
1674
1675 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1676 x = validize_mem (force_const_mem (mode, x));
1677
1678 /* See if the machine can do this with a load multiple insn. */
1679 #ifdef HAVE_load_multiple
1680 if (HAVE_load_multiple)
1681 {
1682 last = get_last_insn ();
1683 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1684 GEN_INT (nregs));
1685 if (pat)
1686 {
1687 emit_insn (pat);
1688 return;
1689 }
1690 else
1691 delete_insns_since (last);
1692 }
1693 #endif
1694
1695 for (i = 0; i < nregs; i++)
1696 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1697 operand_subword_force (x, i, mode));
1698 }
1699
1700 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1701 The number of registers to be filled is NREGS. SIZE indicates the number
1702 of bytes in the object X. */
1703
1704
1705 void
1706 move_block_from_reg (regno, x, nregs, size)
1707 int regno;
1708 rtx x;
1709 int nregs;
1710 int size;
1711 {
1712 int i;
1713 rtx pat, last;
1714 enum machine_mode mode;
1715
1716 /* If SIZE is that of a mode no bigger than a word, just use that
1717 mode's store operation. */
1718 if (size <= UNITS_PER_WORD
1719 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1720 {
1721 emit_move_insn (change_address (x, mode, NULL),
1722 gen_rtx (REG, mode, regno));
1723 return;
1724 }
1725
1726 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1727 to the left before storing to memory. Note that the previous test
1728 doesn't handle all cases (e.g. SIZE == 3). */
1729 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1730 {
1731 rtx tem = operand_subword (x, 0, 1, BLKmode);
1732 rtx shift;
1733
1734 if (tem == 0)
1735 abort ();
1736
1737 shift = expand_shift (LSHIFT_EXPR, word_mode,
1738 gen_rtx (REG, word_mode, regno),
1739 build_int_2 ((UNITS_PER_WORD - size)
1740 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1741 emit_move_insn (tem, shift);
1742 return;
1743 }
1744
1745 /* See if the machine can do this with a store multiple insn. */
1746 #ifdef HAVE_store_multiple
1747 if (HAVE_store_multiple)
1748 {
1749 last = get_last_insn ();
1750 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1751 GEN_INT (nregs));
1752 if (pat)
1753 {
1754 emit_insn (pat);
1755 return;
1756 }
1757 else
1758 delete_insns_since (last);
1759 }
1760 #endif
1761
1762 for (i = 0; i < nregs; i++)
1763 {
1764 rtx tem = operand_subword (x, i, 1, BLKmode);
1765
1766 if (tem == 0)
1767 abort ();
1768
1769 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1770 }
1771 }
1772
1773 /* Emit code to move a block Y to a block X, where X is non-consecutive
1774 registers represented by a PARALLEL. */
1775
1776 void
1777 emit_group_load (x, y)
1778 rtx x, y;
1779 {
1780 rtx target_reg, source;
1781 int i;
1782
1783 if (GET_CODE (x) != PARALLEL)
1784 abort ();
1785
1786 /* Check for a NULL entry, used to indicate that the parameter goes
1787 both on the stack and in registers. */
1788 if (XEXP (XVECEXP (x, 0, 0), 0))
1789 i = 0;
1790 else
1791 i = 1;
1792
1793 for (; i < XVECLEN (x, 0); i++)
1794 {
1795 rtx element = XVECEXP (x, 0, i);
1796
1797 target_reg = XEXP (element, 0);
1798
1799 if (GET_CODE (y) == MEM)
1800 source = change_address (y, GET_MODE (target_reg),
1801 plus_constant (XEXP (y, 0),
1802 INTVAL (XEXP (element, 1))));
1803 else if (XEXP (element, 1) == const0_rtx)
1804 {
1805 if (GET_MODE (target_reg) == GET_MODE (y))
1806 source = y;
1807 /* Allow for the target_reg to be smaller than the input register
1808 to allow for AIX with 4 DF arguments after a single SI arg. The
1809 last DF argument will only load 1 word into the integer registers,
1810 but load a DF value into the float registers. */
1811 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1812 <= GET_MODE_SIZE (GET_MODE (y)))
1813 && GET_MODE (target_reg) == word_mode)
1814 /* This might be a const_double, so we can't just use SUBREG. */
1815 source = operand_subword (y, 0, 0, VOIDmode);
1816 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1817 == GET_MODE_SIZE (GET_MODE (y)))
1818 source = gen_lowpart (GET_MODE (target_reg), y);
1819 else
1820 abort ();
1821 }
1822 else
1823 abort ();
1824
1825 emit_move_insn (target_reg, source);
1826 }
1827 }
1828
1829 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1830 registers represented by a PARALLEL. */
1831
1832 void
1833 emit_group_store (x, y)
1834 rtx x, y;
1835 {
1836 rtx source_reg, target;
1837 int i;
1838
1839 if (GET_CODE (y) != PARALLEL)
1840 abort ();
1841
1842 /* Check for a NULL entry, used to indicate that the parameter goes
1843 both on the stack and in registers. */
1844 if (XEXP (XVECEXP (y, 0, 0), 0))
1845 i = 0;
1846 else
1847 i = 1;
1848
1849 for (; i < XVECLEN (y, 0); i++)
1850 {
1851 rtx element = XVECEXP (y, 0, i);
1852
1853 source_reg = XEXP (element, 0);
1854
1855 if (GET_CODE (x) == MEM)
1856 target = change_address (x, GET_MODE (source_reg),
1857 plus_constant (XEXP (x, 0),
1858 INTVAL (XEXP (element, 1))));
1859 else if (XEXP (element, 1) == const0_rtx)
1860 {
1861 target = x;
1862 if (GET_MODE (target) != GET_MODE (source_reg))
1863 target = gen_lowpart (GET_MODE (source_reg), target);
1864 }
1865 else
1866 abort ();
1867
1868 emit_move_insn (target, source_reg);
1869 }
1870 }
1871
1872 /* Add a USE expression for REG to the (possibly empty) list pointed
1873 to by CALL_FUSAGE. REG must denote a hard register. */
1874
1875 void
1876 use_reg (call_fusage, reg)
1877 rtx *call_fusage, reg;
1878 {
1879 if (GET_CODE (reg) != REG
1880 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1881 abort();
1882
1883 *call_fusage
1884 = gen_rtx (EXPR_LIST, VOIDmode,
1885 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1886 }
1887
1888 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1889 starting at REGNO. All of these registers must be hard registers. */
1890
1891 void
1892 use_regs (call_fusage, regno, nregs)
1893 rtx *call_fusage;
1894 int regno;
1895 int nregs;
1896 {
1897 int i;
1898
1899 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1900 abort ();
1901
1902 for (i = 0; i < nregs; i++)
1903 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1904 }
1905
1906 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1907 PARALLEL REGS. This is for calls that pass values in multiple
1908 non-contiguous locations. The Irix 6 ABI has examples of this. */
1909
1910 void
1911 use_group_regs (call_fusage, regs)
1912 rtx *call_fusage;
1913 rtx regs;
1914 {
1915 int i;
1916
1917 /* Check for a NULL entry, used to indicate that the parameter goes
1918 both on the stack and in registers. */
1919 if (XEXP (XVECEXP (regs, 0, 0), 0))
1920 i = 0;
1921 else
1922 i = 1;
1923
1924 for (; i < XVECLEN (regs, 0); i++)
1925 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1926 }
1927 \f
1928 /* Generate several move instructions to clear LEN bytes of block TO.
1929 (A MEM rtx with BLKmode). The caller must pass TO through
1930 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1931 we can assume. */
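
/* Illustrative example: on a 32-bit target with MOVE_MAX == 4, clearing
   LEN == 7 bytes at word alignment emits one SImode, one HImode and one
   QImode store of zero (assuming mov patterns exist for those modes):
   the loop below walks the integer modes from widest to narrowest, and
   clear_by_pieces_1 consumes as many whole units of each as fit.  */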
1932
1933 static void
1934 clear_by_pieces (to, len, align)
1935 rtx to;
1936 int len, align;
1937 {
1938 struct clear_by_pieces data;
1939 rtx to_addr = XEXP (to, 0);
1940 int max_size = MOVE_MAX + 1;
1941
1942 data.offset = 0;
1943 data.to_addr = to_addr;
1944 data.to = to;
1945 data.autinc_to
1946 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1947 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1948
1949 data.explicit_inc_to = 0;
1950 data.reverse
1951 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1952 if (data.reverse) data.offset = len;
1953 data.len = len;
1954
1955 data.to_struct = MEM_IN_STRUCT_P (to);
1956
1957 /* If copying requires more than two move insns,
1958 copy addresses to registers (to make displacements shorter)
1959 and use post-increment if available. */
1960 if (!data.autinc_to
1961 && move_by_pieces_ninsns (len, align) > 2)
1962 {
1963 #ifdef HAVE_PRE_DECREMENT
1964 if (data.reverse && ! data.autinc_to)
1965 {
1966 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1967 data.autinc_to = 1;
1968 data.explicit_inc_to = -1;
1969 }
1970 #endif
1971 #ifdef HAVE_POST_INCREMENT
1972 if (! data.reverse && ! data.autinc_to)
1973 {
1974 data.to_addr = copy_addr_to_reg (to_addr);
1975 data.autinc_to = 1;
1976 data.explicit_inc_to = 1;
1977 }
1978 #endif
1979 if (!data.autinc_to && CONSTANT_P (to_addr))
1980 data.to_addr = copy_addr_to_reg (to_addr);
1981 }
1982
1983 if (! SLOW_UNALIGNED_ACCESS
1984 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1985 align = MOVE_MAX;
1986
1987 /* First move what we can in the largest integer mode, then go to
1988 successively smaller modes. */
1989
1990 while (max_size > 1)
1991 {
1992 enum machine_mode mode = VOIDmode, tmode;
1993 enum insn_code icode;
1994
1995 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1996 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1997 if (GET_MODE_SIZE (tmode) < max_size)
1998 mode = tmode;
1999
2000 if (mode == VOIDmode)
2001 break;
2002
2003 icode = mov_optab->handlers[(int) mode].insn_code;
2004 if (icode != CODE_FOR_nothing
2005 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2006 GET_MODE_SIZE (mode)))
2007 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2008
2009 max_size = GET_MODE_SIZE (mode);
2010 }
2011
2012 /* The code above should have handled everything. */
2013 if (data.len != 0)
2014 abort ();
2015 }
2016
2017 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2018 with move instructions for mode MODE. GENFUN is the gen_... function
2019 to make a move insn for that mode. DATA has all the other info. */
2020
2021 static void
2022 clear_by_pieces_1 (genfun, mode, data)
2023 rtx (*genfun) ();
2024 enum machine_mode mode;
2025 struct clear_by_pieces *data;
2026 {
2027 register int size = GET_MODE_SIZE (mode);
2028 register rtx to1;
2029
2030 while (data->len >= size)
2031 {
2032 if (data->reverse) data->offset -= size;
2033
2034 to1 = (data->autinc_to
2035 ? gen_rtx (MEM, mode, data->to_addr)
2036 : copy_rtx (change_address (data->to, mode,
2037 plus_constant (data->to_addr,
2038 data->offset))));
2039 MEM_IN_STRUCT_P (to1) = data->to_struct;
2040
2041 #ifdef HAVE_PRE_DECREMENT
2042 if (data->explicit_inc_to < 0)
2043 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2044 #endif
2045
2046 emit_insn ((*genfun) (to1, const0_rtx));
2047 #ifdef HAVE_POST_INCREMENT
2048 if (data->explicit_inc_to > 0)
2049 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2050 #endif
2051
2052 if (! data->reverse) data->offset += size;
2053
2054 data->len -= size;
2055 }
2056 }
2057 \f
2058 /* Write zeros through the storage of OBJECT.
2059 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2060 the maximum alignment we can assume it has, measured in bytes. */
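
/* A sketch of the strategy below: when OBJECT is BLKmode and SIZE is a
   constant small enough that move_by_pieces_ninsns is below MOVE_RATIO,
   clear with individual stores (clear_by_pieces); otherwise try the
   machine's clrstr patterns, narrowest mode first; failing that, fall
   back to a memset (or bzero) library call.  A non-BLKmode OBJECT is
   cleared with a single move of const0_rtx.  */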
2061
2062 void
2063 clear_storage (object, size, align)
2064 rtx object;
2065 rtx size;
2066 int align;
2067 {
2068 if (GET_MODE (object) == BLKmode)
2069 {
2070 object = protect_from_queue (object, 1);
2071 size = protect_from_queue (size, 0);
2072
2073 if (GET_CODE (size) == CONST_INT
2074 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2075 clear_by_pieces (object, INTVAL (size), align);
2076
2077 else
2078 {
2079 /* Try the most limited insn first, because there's no point
2080 including more than one in the machine description unless
2081 the more limited one has some advantage. */
2082
2083 rtx opalign = GEN_INT (align);
2084 enum machine_mode mode;
2085
2086 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2087 mode = GET_MODE_WIDER_MODE (mode))
2088 {
2089 enum insn_code code = clrstr_optab[(int) mode];
2090
2091 if (code != CODE_FOR_nothing
2092 /* We don't need MODE to be narrower than
2093 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2094 the mode mask, as it is returned by the macro, it will
2095 definitely be less than the actual mode mask. */
2096 && ((GET_CODE (size) == CONST_INT
2097 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2098 <= GET_MODE_MASK (mode)))
2099 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2100 && (insn_operand_predicate[(int) code][0] == 0
2101 || (*insn_operand_predicate[(int) code][0]) (object,
2102 BLKmode))
2103 && (insn_operand_predicate[(int) code][2] == 0
2104 || (*insn_operand_predicate[(int) code][2]) (opalign,
2105 VOIDmode)))
2106 {
2107 rtx op1;
2108 rtx last = get_last_insn ();
2109 rtx pat;
2110
2111 op1 = convert_to_mode (mode, size, 1);
2112 if (insn_operand_predicate[(int) code][1] != 0
2113 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2114 mode))
2115 op1 = copy_to_mode_reg (mode, op1);
2116
2117 pat = GEN_FCN ((int) code) (object, op1, opalign);
2118 if (pat)
2119 {
2120 emit_insn (pat);
2121 return;
2122 }
2123 else
2124 delete_insns_since (last);
2125 }
2126 }
2127
2128
2129 #ifdef TARGET_MEM_FUNCTIONS
2130 emit_library_call (memset_libfunc, 0,
2131 VOIDmode, 3,
2132 XEXP (object, 0), Pmode,
2133 const0_rtx, TYPE_MODE (integer_type_node),
2134 convert_to_mode (TYPE_MODE (sizetype),
2135 size, TREE_UNSIGNED (sizetype)),
2136 TYPE_MODE (sizetype));
2137 #else
2138 emit_library_call (bzero_libfunc, 0,
2139 VOIDmode, 2,
2140 XEXP (object, 0), Pmode,
2141 convert_to_mode (TYPE_MODE (integer_type_node),
2142 size,
2143 TREE_UNSIGNED (integer_type_node)),
2144 TYPE_MODE (integer_type_node));
2145 #endif
2146 }
2147 }
2148 else
2149 emit_move_insn (object, const0_rtx);
2150 }
2151
2152 /* Generate code to copy Y into X.
2153 Both Y and X must have the same mode, except that
2154 Y can be a constant with VOIDmode.
2155 This mode cannot be BLKmode; use emit_block_move for that.
2156
2157 Return the last instruction emitted. */
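
/* For example (illustrative): if Y is a CONSTANT_P rtx that fails
   LEGITIMATE_CONSTANT_P, the body below first forces it into the
   constant pool with force_const_mem and then moves it from memory.  */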
2158
2159 rtx
2160 emit_move_insn (x, y)
2161 rtx x, y;
2162 {
2163 enum machine_mode mode = GET_MODE (x);
2164
2165 x = protect_from_queue (x, 1);
2166 y = protect_from_queue (y, 0);
2167
2168 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2169 abort ();
2170
2171 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2172 y = force_const_mem (mode, y);
2173
2174 /* If X or Y are memory references, verify that their addresses are valid
2175 for the machine. */
2176 if (GET_CODE (x) == MEM
2177 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2178 && ! push_operand (x, GET_MODE (x)))
2179 || (flag_force_addr
2180 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2181 x = change_address (x, VOIDmode, XEXP (x, 0));
2182
2183 if (GET_CODE (y) == MEM
2184 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2185 || (flag_force_addr
2186 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2187 y = change_address (y, VOIDmode, XEXP (y, 0));
2188
2189 if (mode == BLKmode)
2190 abort ();
2191
2192 return emit_move_insn_1 (x, y);
2193 }
2194
2195 /* Low level part of emit_move_insn.
2196 Called just like emit_move_insn, but assumes X and Y
2197 are basically valid. */
2198
2199 rtx
2200 emit_move_insn_1 (x, y)
2201 rtx x, y;
2202 {
2203 enum machine_mode mode = GET_MODE (x);
2204 enum machine_mode submode;
2205 enum mode_class class = GET_MODE_CLASS (mode);
2206 int i;
2207
2208 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2209 return
2210 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2211
2212 /* Expand complex moves by moving real part and imag part, if possible. */
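/* For example (illustrative): a DCmode move, on a machine with no
   DCmode mov pattern but with a DFmode one, is emitted as two DFmode
   moves, one of the real parts and one of the imaginary parts.  */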
2213 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2214 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2215 * BITS_PER_UNIT),
2216 (class == MODE_COMPLEX_INT
2217 ? MODE_INT : MODE_FLOAT),
2218 0))
2219 && (mov_optab->handlers[(int) submode].insn_code
2220 != CODE_FOR_nothing))
2221 {
2222 /* Don't split destination if it is a stack push. */
2223 int stack = push_operand (x, GET_MODE (x));
2224 rtx insns;
2225
2226 /* If this is a stack push, push the highpart first, so it
2227 will end up in the argument order.
2228
2229 In that case, change_address is used only to convert
2230 the mode, not to change the address. */
2231 if (stack)
2232 {
2233 /* Note that the real part always precedes the imag part in memory
2234 regardless of machine's endianness. */
2235 #ifdef STACK_GROWS_DOWNWARD
2236 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2237 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2238 gen_imagpart (submode, y)));
2239 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2240 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2241 gen_realpart (submode, y)));
2242 #else
2243 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2244 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2245 gen_realpart (submode, y)));
2246 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2247 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2248 gen_imagpart (submode, y)));
2249 #endif
2250 }
2251 else
2252 {
2253 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2254 (gen_realpart (submode, x), gen_realpart (submode, y)));
2255 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2256 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2257 }
2258
2259 return get_last_insn ();
2260 }
2261
2262 /* This will handle any multi-word mode that lacks a move_insn pattern.
2263 However, you will get better code if you define such patterns,
2264 even if they must turn into multiple assembler instructions. */
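/* For instance (illustrative): a DImode move on a 32-bit target that
   lacks a DImode mov pattern is emitted word by word, using
   operand_subword to extract each word of X and Y.  */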
2265 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2266 {
2267 rtx last_insn = 0;
2268 rtx insns;
2269
2270 #ifdef PUSH_ROUNDING
2271
2272 /* If X is a push on the stack, do the push now and replace
2273 X with a reference to the stack pointer. */
2274 if (push_operand (x, GET_MODE (x)))
2275 {
2276 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2277 x = change_address (x, VOIDmode, stack_pointer_rtx);
2278 }
2279 #endif
2280
2281 /* Show the output dies here. */
2282 if (x != y)
2283 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2284
2285 for (i = 0;
2286 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2287 i++)
2288 {
2289 rtx xpart = operand_subword (x, i, 1, mode);
2290 rtx ypart = operand_subword (y, i, 1, mode);
2291
2292 /* If we can't get a part of Y, put Y into memory if it is a
2293 constant. Otherwise, force it into a register. If we still
2294 can't get a part of Y, abort. */
2295 if (ypart == 0 && CONSTANT_P (y))
2296 {
2297 y = force_const_mem (mode, y);
2298 ypart = operand_subword (y, i, 1, mode);
2299 }
2300 else if (ypart == 0)
2301 ypart = operand_subword_force (y, i, mode);
2302
2303 if (xpart == 0 || ypart == 0)
2304 abort ();
2305
2306 last_insn = emit_move_insn (xpart, ypart);
2307 }
2308
2309 return last_insn;
2310 }
2311 else
2312 abort ();
2313 }
2314 \f
2315 /* Pushing data onto the stack. */
2316
2317 /* Push a block of length SIZE (perhaps variable)
2318 and return an rtx to address the beginning of the block.
2319 Note that it is not possible for the value returned to be a QUEUED.
2320 The value may be virtual_outgoing_args_rtx.
2321
2322 EXTRA is the number of bytes of padding to push in addition to SIZE.
2323 BELOW nonzero means this padding comes at low addresses;
2324 otherwise, the padding comes at high addresses. */
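
/* Illustrative example: on a STACK_GROWS_DOWNWARD machine,
   push_block (GEN_INT (16), 4, 1) adjusts the stack pointer by 20
   bytes and, since BELOW is nonzero, returns an address 4 bytes past
   the new bottom of the block, leaving the padding at low addresses.  */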
2325
2326 rtx
2327 push_block (size, extra, below)
2328 rtx size;
2329 int extra, below;
2330 {
2331 register rtx temp;
2332
2333 size = convert_modes (Pmode, ptr_mode, size, 1);
2334 if (CONSTANT_P (size))
2335 anti_adjust_stack (plus_constant (size, extra));
2336 else if (GET_CODE (size) == REG && extra == 0)
2337 anti_adjust_stack (size);
2338 else
2339 {
2340 rtx temp = copy_to_mode_reg (Pmode, size);
2341 if (extra != 0)
2342 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2343 temp, 0, OPTAB_LIB_WIDEN);
2344 anti_adjust_stack (temp);
2345 }
2346
2347 #ifdef STACK_GROWS_DOWNWARD
2348 temp = virtual_outgoing_args_rtx;
2349 if (extra != 0 && below)
2350 temp = plus_constant (temp, extra);
2351 #else
2352 if (GET_CODE (size) == CONST_INT)
2353 temp = plus_constant (virtual_outgoing_args_rtx,
2354 - INTVAL (size) - (below ? 0 : extra));
2355 else if (extra != 0 && !below)
2356 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2357 negate_rtx (Pmode, plus_constant (size, extra)));
2358 else
2359 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2360 negate_rtx (Pmode, size));
2361 #endif
2362
2363 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2364 }
2365
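/* Return an rtx to be used as the address of a MEM that pushes onto
   the stack: STACK_PUSH_CODE (a pre- or post-modification of the
   stack pointer) applied to stack_pointer_rtx.  */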
2366 rtx
2367 gen_push_operand ()
2368 {
2369 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2370 }
2371
2372 /* Generate code to push X onto the stack, assuming it has mode MODE and
2373 type TYPE.
2374 MODE is redundant except when X is a CONST_INT (since they don't
2375 carry mode info).
2376 SIZE is an rtx for the size of data to be copied (in bytes),
2377 needed only if X is BLKmode.
2378
2379 ALIGN (in bytes) is maximum alignment we can assume.
2380
2381 If PARTIAL and REG are both nonzero, then copy that many of the first
2382 words of X into registers starting with REG, and push the rest of X.
2383 The amount of space pushed is decreased by PARTIAL words,
2384 rounded *down* to a multiple of PARM_BOUNDARY.
2385 REG must be a hard register in this case.
2386 If REG is zero but PARTIAL is not, take all other actions for an
2387 argument partially in registers, but do not actually load any
2388 registers.
2389
2390 EXTRA is the amount in bytes of extra space to leave next to this arg.
2391 This is ignored if an argument block has already been allocated.
2392
2393 On a machine that lacks real push insns, ARGS_ADDR is the address of
2394 the bottom of the argument block for this call. We use indexing off there
2395 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2396 argument block has not been preallocated.
2397
2398 ARGS_SO_FAR is the size of args previously pushed for this call. */
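
/* Overview of the cases below: a BLKmode X is pushed with move_by_pieces
   when push insns are cheap and safe; otherwise space is obtained (with
   push_block if no argument block exists) and the data is copied there
   by pieces, with a movstr pattern, or with a library call.  A scalar
   partly in registers is pushed word by word through recursive calls.
   Any other scalar is stored with a single move, through a push operand
   or at ARGS_ADDR + ARGS_SO_FAR.  */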
2399
2400 void
2401 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2402 args_addr, args_so_far)
2403 register rtx x;
2404 enum machine_mode mode;
2405 tree type;
2406 rtx size;
2407 int align;
2408 int partial;
2409 rtx reg;
2410 int extra;
2411 rtx args_addr;
2412 rtx args_so_far;
2413 {
2414 rtx xinner;
2415 enum direction stack_direction
2416 #ifdef STACK_GROWS_DOWNWARD
2417 = downward;
2418 #else
2419 = upward;
2420 #endif
2421
2422 /* Decide where to pad the argument: `downward' for below,
2423 `upward' for above, or `none' for don't pad it.
2424 Default is below for small data on big-endian machines; else above. */
2425 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2426
2427 /* Invert direction if stack is post-update. */
2428 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2429 if (where_pad != none)
2430 where_pad = (where_pad == downward ? upward : downward);
2431
2432 xinner = x = protect_from_queue (x, 0);
2433
2434 if (mode == BLKmode)
2435 {
2436 /* Copy a block into the stack, entirely or partially. */
2437
2438 register rtx temp;
2439 int used = partial * UNITS_PER_WORD;
2440 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2441 int skip;
2442
2443 if (size == 0)
2444 abort ();
2445
2446 used -= offset;
2447
2448 /* USED is now the # of bytes we need not copy to the stack
2449 because registers will take care of them. */
2450
2451 if (partial != 0)
2452 xinner = change_address (xinner, BLKmode,
2453 plus_constant (XEXP (xinner, 0), used));
2454
2455 /* If the partial register-part of the arg counts in its stack size,
2456 skip the part of stack space corresponding to the registers.
2457 Otherwise, start copying to the beginning of the stack space,
2458 by setting SKIP to 0. */
2459 #ifndef REG_PARM_STACK_SPACE
2460 skip = 0;
2461 #else
2462 skip = used;
2463 #endif
2464
2465 #ifdef PUSH_ROUNDING
2466 /* Do it with several push insns if that doesn't take lots of insns
2467 and if there is no difficulty with push insns that skip bytes
2468 on the stack for alignment purposes. */
2469 if (args_addr == 0
2470 && GET_CODE (size) == CONST_INT
2471 && skip == 0
2472 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2473 < MOVE_RATIO)
2474 /* Here we avoid the case of a structure whose weak alignment
2475 forces many pushes of a small amount of data,
2476 since such small pushes do rounding that causes trouble. */
2477 && ((! SLOW_UNALIGNED_ACCESS)
2478 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2479 || PUSH_ROUNDING (align) == align)
2480 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2481 {
2482 /* Push padding now if padding above and stack grows down,
2483 or if padding below and stack grows up.
2484 But if space already allocated, this has already been done. */
2485 if (extra && args_addr == 0
2486 && where_pad != none && where_pad != stack_direction)
2487 anti_adjust_stack (GEN_INT (extra));
2488
2489 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2490 INTVAL (size) - used, align);
2491 }
2492 else
2493 #endif /* PUSH_ROUNDING */
2494 {
2495 /* Otherwise make space on the stack and copy the data
2496 to the address of that space. */
2497
2498 /* Deduct words put into registers from the size we must copy. */
2499 if (partial != 0)
2500 {
2501 if (GET_CODE (size) == CONST_INT)
2502 size = GEN_INT (INTVAL (size) - used);
2503 else
2504 size = expand_binop (GET_MODE (size), sub_optab, size,
2505 GEN_INT (used), NULL_RTX, 0,
2506 OPTAB_LIB_WIDEN);
2507 }
2508
2509 /* Get the address of the stack space.
2510 In this case, we do not deal with EXTRA separately.
2511 A single stack adjust will do. */
2512 if (! args_addr)
2513 {
2514 temp = push_block (size, extra, where_pad == downward);
2515 extra = 0;
2516 }
2517 else if (GET_CODE (args_so_far) == CONST_INT)
2518 temp = memory_address (BLKmode,
2519 plus_constant (args_addr,
2520 skip + INTVAL (args_so_far)));
2521 else
2522 temp = memory_address (BLKmode,
2523 plus_constant (gen_rtx (PLUS, Pmode,
2524 args_addr, args_so_far),
2525 skip));
2526
2527 /* TEMP is the address of the block. Copy the data there. */
2528 if (GET_CODE (size) == CONST_INT
2529 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2530 < MOVE_RATIO))
2531 {
2532 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2533 INTVAL (size), align);
2534 goto ret;
2535 }
2536 /* Try the most limited insn first, because there's no point
2537 including more than one in the machine description unless
2538 the more limited one has some advantage. */
2539 #ifdef HAVE_movstrqi
2540 if (HAVE_movstrqi
2541 && GET_CODE (size) == CONST_INT
2542 && ((unsigned) INTVAL (size)
2543 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2544 {
2545 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2546 xinner, size, GEN_INT (align));
2547 if (pat != 0)
2548 {
2549 emit_insn (pat);
2550 goto ret;
2551 }
2552 }
2553 #endif
2554 #ifdef HAVE_movstrhi
2555 if (HAVE_movstrhi
2556 && GET_CODE (size) == CONST_INT
2557 && ((unsigned) INTVAL (size)
2558 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2559 {
2560 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2561 xinner, size, GEN_INT (align));
2562 if (pat != 0)
2563 {
2564 emit_insn (pat);
2565 goto ret;
2566 }
2567 }
2568 #endif
2569 #ifdef HAVE_movstrsi
2570 if (HAVE_movstrsi)
2571 {
2572 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2573 xinner, size, GEN_INT (align));
2574 if (pat != 0)
2575 {
2576 emit_insn (pat);
2577 goto ret;
2578 }
2579 }
2580 #endif
2581 #ifdef HAVE_movstrdi
2582 if (HAVE_movstrdi)
2583 {
2584 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2585 xinner, size, GEN_INT (align));
2586 if (pat != 0)
2587 {
2588 emit_insn (pat);
2589 goto ret;
2590 }
2591 }
2592 #endif
2593
2594 #ifndef ACCUMULATE_OUTGOING_ARGS
2595 /* If the source is referenced relative to the stack pointer,
2596 copy it to another register to stabilize it. We do not need
2597 to do this if we know that we won't be changing sp. */
2598
2599 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2600 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2601 temp = copy_to_reg (temp);
2602 #endif
2603
2604 /* Make inhibit_defer_pop nonzero around the library call
2605 to force it to pop the bcopy-arguments right away. */
2606 NO_DEFER_POP;
2607 #ifdef TARGET_MEM_FUNCTIONS
2608 emit_library_call (memcpy_libfunc, 0,
2609 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2610 convert_to_mode (TYPE_MODE (sizetype),
2611 size, TREE_UNSIGNED (sizetype)),
2612 TYPE_MODE (sizetype));
2613 #else
2614 emit_library_call (bcopy_libfunc, 0,
2615 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2616 convert_to_mode (TYPE_MODE (integer_type_node),
2617 size,
2618 TREE_UNSIGNED (integer_type_node)),
2619 TYPE_MODE (integer_type_node));
2620 #endif
2621 OK_DEFER_POP;
2622 }
2623 }
2624 else if (partial > 0)
2625 {
2626 /* Scalar partly in registers. */
2627
2628 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2629 int i;
2630 int not_stack;
2631 /* # words of start of argument
2632 that we must make space for but need not store. */
2633 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2634 int args_offset = INTVAL (args_so_far);
2635 int skip;
2636
2637 /* Push padding now if padding above and stack grows down,
2638 or if padding below and stack grows up.
2639 But if space already allocated, this has already been done. */
2640 if (extra && args_addr == 0
2641 && where_pad != none && where_pad != stack_direction)
2642 anti_adjust_stack (GEN_INT (extra));
2643
2644 /* If we make space by pushing it, we might as well push
2645 the real data. Otherwise, we can leave OFFSET nonzero
2646 and leave the space uninitialized. */
2647 if (args_addr == 0)
2648 offset = 0;
2649
2650 /* Now NOT_STACK gets the number of words that we don't need to
2651 allocate on the stack. */
2652 not_stack = partial - offset;
2653
2654 /* If the partial register-part of the arg counts in its stack size,
2655 skip the part of stack space corresponding to the registers.
2656 Otherwise, start copying to the beginning of the stack space,
2657 by setting SKIP to 0. */
2658 #ifndef REG_PARM_STACK_SPACE
2659 skip = 0;
2660 #else
2661 skip = not_stack;
2662 #endif
2663
2664 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2665 x = validize_mem (force_const_mem (mode, x));
2666
2667 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2668 SUBREGs of such registers are not allowed. */
2669 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2670 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2671 x = copy_to_reg (x);
2672
2673 /* Loop over all the words allocated on the stack for this arg. */
2674 /* We can do it by words, because any scalar bigger than a word
2675 has a size a multiple of a word. */
2676 #ifndef PUSH_ARGS_REVERSED
2677 for (i = not_stack; i < size; i++)
2678 #else
2679 for (i = size - 1; i >= not_stack; i--)
2680 #endif
2681 if (i >= not_stack + offset)
2682 emit_push_insn (operand_subword_force (x, i, mode),
2683 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2684 0, args_addr,
2685 GEN_INT (args_offset + ((i - not_stack + skip)
2686 * UNITS_PER_WORD)));
2687 }
2688 else
2689 {
2690 rtx addr;
2691
2692 /* Push padding now if padding above and stack grows down,
2693 or if padding below and stack grows up.
2694 But if space already allocated, this has already been done. */
2695 if (extra && args_addr == 0
2696 && where_pad != none && where_pad != stack_direction)
2697 anti_adjust_stack (GEN_INT (extra));
2698
2699 #ifdef PUSH_ROUNDING
2700 if (args_addr == 0)
2701 addr = gen_push_operand ();
2702 else
2703 #endif
2704 if (GET_CODE (args_so_far) == CONST_INT)
2705 addr
2706 = memory_address (mode,
2707 plus_constant (args_addr, INTVAL (args_so_far)));
2708 else
2709 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2710 args_so_far));
2711
2712 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2713 }
2714
2715 ret:
2716 /* If part should go in registers, copy that part
2717 into the appropriate registers. Do this now, at the end,
2718 since mem-to-mem copies above may do function calls. */
2719 if (partial > 0 && reg != 0)
2720 {
2721 /* Handle calls that pass values in multiple non-contiguous locations.
2722 The Irix 6 ABI has examples of this. */
2723 if (GET_CODE (reg) == PARALLEL)
2724 emit_group_load (reg, x);
2725 else
2726 move_block_to_reg (REGNO (reg), x, partial, mode);
2727 }
2728
2729 if (extra && args_addr == 0 && where_pad == stack_direction)
2730 anti_adjust_stack (GEN_INT (extra));
2731 }
2732 \f
2733 /* Expand an assignment that stores the value of FROM into TO.
2734 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2735 (This may contain a QUEUED rtx;
2736 if the value is constant, this rtx is a constant.)
2737 Otherwise, the returned value is NULL_RTX.
2738
2739 SUGGEST_REG is no longer actually used.
2740 It used to mean, copy the value through a register
2741 and return that register, if that is possible.
2742 We now use WANT_VALUE to decide whether to do this. */
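
/* For example (illustrative): the C statement `v = f ();' arrives here
   with TO the VAR_DECL for `v', FROM the CALL_EXPR, and WANT_VALUE zero;
   the CALL_EXPR case below then expands the call before the lhs is
   computed.  */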
2743
2744 rtx
2745 expand_assignment (to, from, want_value, suggest_reg)
2746 tree to, from;
2747 int want_value;
2748 int suggest_reg;
2749 {
2750 register rtx to_rtx = 0;
2751 rtx result;
2752
2753 /* Don't crash if the lhs of the assignment was erroneous. */
2754
2755 if (TREE_CODE (to) == ERROR_MARK)
2756 {
2757 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2758 return want_value ? result : NULL_RTX;
2759 }
2760
2761 if (output_bytecode)
2762 {
2763 tree dest_innermost;
2764
2765 bc_expand_expr (from);
2766 bc_emit_instruction (duplicate);
2767
2768 dest_innermost = bc_expand_address (to);
2769
2770 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2771 take care of it here. */
2772
2773 bc_store_memory (TREE_TYPE (to), dest_innermost);
2774 return NULL;
2775 }
2776
2777 /* Assignment of a structure component needs special treatment
2778 if the structure component's rtx is not simply a MEM.
2779 Assignment of an array element at a constant index, and assignment of
2780 an array element in an unaligned packed structure field, has the same
2781 problem. */
2782
2783 if (TREE_CODE (to) == COMPONENT_REF
2784 || TREE_CODE (to) == BIT_FIELD_REF
2785 || (TREE_CODE (to) == ARRAY_REF
2786 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2787 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2788 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2789 {
2790 enum machine_mode mode1;
2791 int bitsize;
2792 int bitpos;
2793 tree offset;
2794 int unsignedp;
2795 int volatilep = 0;
2796 tree tem;
2797 int alignment;
2798
2799 push_temp_slots ();
2800 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2801 &unsignedp, &volatilep, &alignment);
2802
2803 /* If we are going to use store_bit_field and extract_bit_field,
2804 make sure to_rtx will be safe for multiple use. */
2805
2806 if (mode1 == VOIDmode && want_value)
2807 tem = stabilize_reference (tem);
2808
2809 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2810 if (offset != 0)
2811 {
2812 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2813
2814 if (GET_CODE (to_rtx) != MEM)
2815 abort ();
2816 to_rtx = change_address (to_rtx, VOIDmode,
2817 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2818 force_reg (ptr_mode, offset_rtx)));
2819 }
2820 if (volatilep)
2821 {
2822 if (GET_CODE (to_rtx) == MEM)
2823 {
2824 /* When the offset is zero, to_rtx is the address of the
2825 structure we are storing into, and hence may be shared.
2826 We must make a new MEM before setting the volatile bit. */
2827 if (offset == 0)
2828 to_rtx = copy_rtx (to_rtx);
2829
2830 MEM_VOLATILE_P (to_rtx) = 1;
2831 }
2832 #if 0 /* This was turned off because, when a field is volatile
2833 in an object which is not volatile, the object may be in a register,
2834 and then we would abort over here. */
2835 else
2836 abort ();
2837 #endif
2838 }
2839
2840 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2841 (want_value
2842 /* Spurious cast makes HPUX compiler happy. */
2843 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2844 : VOIDmode),
2845 unsignedp,
2846 /* Required alignment of containing datum. */
2847 alignment,
2848 int_size_in_bytes (TREE_TYPE (tem)));
2849 preserve_temp_slots (result);
2850 free_temp_slots ();
2851 pop_temp_slots ();
2852
2853 /* If the value is meaningful, convert RESULT to the proper mode.
2854 Otherwise, return nothing. */
2855 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2856 TYPE_MODE (TREE_TYPE (from)),
2857 result,
2858 TREE_UNSIGNED (TREE_TYPE (to)))
2859 : NULL_RTX);
2860 }
2861
2862 /* If the rhs is a function call and its value is not an aggregate,
2863 call the function before we start to compute the lhs.
2864 This is needed for correct code for cases such as
2865 val = setjmp (buf) on machines where reference to val
2866 requires loading up part of an address in a separate insn.
2867
2868 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2869 a promoted variable where the zero- or sign-extension needs to be done.
2870 Handling this in the normal way is safe because no computation is done
2871 before the call. */
2872 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2874 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2875 {
2876 rtx value;
2877
2878 push_temp_slots ();
2879 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2880 if (to_rtx == 0)
2881 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2882
2883 /* Handle calls that return values in multiple non-contiguous locations.
2884 The Irix 6 ABI has examples of this. */
2885 if (GET_CODE (to_rtx) == PARALLEL)
2886 emit_group_load (to_rtx, value);
2887 else if (GET_MODE (to_rtx) == BLKmode)
2888 emit_block_move (to_rtx, value, expr_size (from),
2889 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2890 else
2891 emit_move_insn (to_rtx, value);
2892 preserve_temp_slots (to_rtx);
2893 free_temp_slots ();
2894 pop_temp_slots ();
2895 return want_value ? to_rtx : NULL_RTX;
2896 }
2897
2898 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2899 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2900
2901 if (to_rtx == 0)
2902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2903
2904 /* Don't move directly into a return register. */
2905 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2906 {
2907 rtx temp;
2908
2909 push_temp_slots ();
2910 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2911 emit_move_insn (to_rtx, temp);
2912 preserve_temp_slots (to_rtx);
2913 free_temp_slots ();
2914 pop_temp_slots ();
2915 return want_value ? to_rtx : NULL_RTX;
2916 }
2917
2918 /* In case we are returning the contents of an object which overlaps
2919 the place the value is being stored, use a safe function when copying
2920 a value through a pointer into a structure value return block. */
2921 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2922 && current_function_returns_struct
2923 && !current_function_returns_pcc_struct)
2924 {
2925 rtx from_rtx, size;
2926
2927 push_temp_slots ();
2928 size = expr_size (from);
2929 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2930
2931 #ifdef TARGET_MEM_FUNCTIONS
2932 emit_library_call (memcpy_libfunc, 0,
2933 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2934 XEXP (from_rtx, 0), Pmode,
2935 convert_to_mode (TYPE_MODE (sizetype),
2936 size, TREE_UNSIGNED (sizetype)),
2937 TYPE_MODE (sizetype));
2938 #else
2939 emit_library_call (bcopy_libfunc, 0,
2940 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2941 XEXP (to_rtx, 0), Pmode,
2942 convert_to_mode (TYPE_MODE (integer_type_node),
2943 size, TREE_UNSIGNED (integer_type_node)),
2944 TYPE_MODE (integer_type_node));
2945 #endif
2946
2947 preserve_temp_slots (to_rtx);
2948 free_temp_slots ();
2949 pop_temp_slots ();
2950 return want_value ? to_rtx : NULL_RTX;
2951 }
2952
2953 /* Compute FROM and store the value in the rtx we got. */
2954
2955 push_temp_slots ();
2956 result = store_expr (from, to_rtx, want_value);
2957 preserve_temp_slots (result);
2958 free_temp_slots ();
2959 pop_temp_slots ();
2960 return want_value ? result : NULL_RTX;
2961 }
2962
2963 /* Generate code for computing expression EXP,
2964 and storing the value into TARGET.
2965 TARGET may contain a QUEUED rtx.
2966
2967 If WANT_VALUE is nonzero, return a copy of the value
2968 not in TARGET, so that we can be sure to use the proper
2969 value in a containing expression even if TARGET has something
2970 else stored in it. If possible, we copy the value through a pseudo
2971 and return that pseudo. Or, if the value is constant, we try to
2972 return the constant. In some cases, we return a pseudo
2973 copied *from* TARGET.
2974
2975 If the mode is BLKmode then we may return TARGET itself.
2976 It turns out that in BLKmode it doesn't cause a problem,
2977 because C has no operators that could combine two different
2978 assignments into the same BLKmode object with different values
2979 with no sequence point. Will other languages need this to
2980 be more thorough?
2981
2982 If WANT_VALUE is 0, we return NULL, to make sure
2983 to catch quickly any cases where the caller uses the value
2984 and fails to set WANT_VALUE. */
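
/* For example (illustrative): in `a = b = c', the inner assignment is
   expanded with WANT_VALUE nonzero so that its result can feed the outer
   one, while a statement-level assignment passes WANT_VALUE == 0 and
   gets NULL_RTX back.  */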
2985
2986 rtx
2987 store_expr (exp, target, want_value)
2988 register tree exp;
2989 register rtx target;
2990 int want_value;
2991 {
2992 register rtx temp;
2993 int dont_return_target = 0;
2994
2995 if (TREE_CODE (exp) == COMPOUND_EXPR)
2996 {
2997 /* Perform first part of compound expression, then assign from second
2998 part. */
2999 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3000 emit_queue ();
3001 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3002 }
3003 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3004 {
3005 /* For conditional expression, get safe form of the target. Then
3006 test the condition, doing the appropriate assignment on either
3007 side. This avoids the creation of unnecessary temporaries.
3008 For non-BLKmode, it is more efficient not to do this. */
3009
3010 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3011
3012 emit_queue ();
3013 target = protect_from_queue (target, 1);
3014
3015 do_pending_stack_adjust ();
3016 NO_DEFER_POP;
3017 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3018 start_cleanup_deferal ();
3019 store_expr (TREE_OPERAND (exp, 1), target, 0);
3020 end_cleanup_deferal ();
3021 emit_queue ();
3022 emit_jump_insn (gen_jump (lab2));
3023 emit_barrier ();
3024 emit_label (lab1);
3025 start_cleanup_deferal ();
3026 store_expr (TREE_OPERAND (exp, 2), target, 0);
3027 end_cleanup_deferal ();
3028 emit_queue ();
3029 emit_label (lab2);
3030 OK_DEFER_POP;
3031
3032 return want_value ? target : NULL_RTX;
3033 }
3034 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3035 && GET_MODE (target) != BLKmode)
3036 /* If target is in memory and caller wants value in a register instead,
3037 arrange that. Pass TARGET as target for expand_expr so that,
3038 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3039 We know expand_expr will not use the target in that case.
3040 Don't do this if TARGET is volatile because we are supposed
3041 to write it and then read it. */
3042 {
3043 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3044 GET_MODE (target), 0);
3045 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3046 temp = copy_to_reg (temp);
3047 dont_return_target = 1;
3048 }
3049 else if (queued_subexp_p (target))
3050 /* If target contains a postincrement, let's not risk
3051 using it as the place to generate the rhs. */
3052 {
3053 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3054 {
3055 /* Expand EXP into a new pseudo. */
3056 temp = gen_reg_rtx (GET_MODE (target));
3057 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3058 }
3059 else
3060 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3061
3062 /* If target is volatile, ANSI requires accessing the value
3063 *from* the target, if it is accessed. So make that happen.
3064 In no case return the target itself. */
3065 if (! MEM_VOLATILE_P (target) && want_value)
3066 dont_return_target = 1;
3067 }
3068 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3069 /* If this is a scalar in a register that is stored in a wider mode
3070 than the declared mode, compute the result into its declared mode
3071 and then convert to the wider mode. Our value is the computed
3072 expression. */
3073 {
3074 /* If we don't want a value, we can do the conversion inside EXP,
3075 which will often result in some optimizations. Do the conversion
3076 in two steps: first change the signedness, if needed, then
3077 the extend. But don't do this if the type of EXP is a subtype
3078 of something else since then the conversion might involve
3079 more than just converting modes. */
3080 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3081 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3082 {
3083 if (TREE_UNSIGNED (TREE_TYPE (exp))
3084 != SUBREG_PROMOTED_UNSIGNED_P (target))
3085 exp
3086 = convert
3087 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3088 TREE_TYPE (exp)),
3089 exp);
3090
3091 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3092 SUBREG_PROMOTED_UNSIGNED_P (target)),
3093 exp);
3094 }
3095
3096 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3097
3098 /* If TEMP is a volatile MEM and we want a result value, make
3099 the access now so it gets done only once. Likewise if
3100 it contains TARGET. */
3101 if (GET_CODE (temp) == MEM && want_value
3102 && (MEM_VOLATILE_P (temp)
3103 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3104 temp = copy_to_reg (temp);
3105
3106 /* If TEMP is a VOIDmode constant, use convert_modes to make
3107 sure that we properly convert it. */
3108 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3109 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3110 TYPE_MODE (TREE_TYPE (exp)), temp,
3111 SUBREG_PROMOTED_UNSIGNED_P (target));
3112
3113 convert_move (SUBREG_REG (target), temp,
3114 SUBREG_PROMOTED_UNSIGNED_P (target));
3115 return want_value ? temp : NULL_RTX;
3116 }
3117 else
3118 {
3119 temp = expand_expr (exp, target, GET_MODE (target), 0);
3120 /* Return TARGET if it's a specified hardware register.
3121 If TARGET is a volatile mem ref, either return TARGET
3122 or return a reg copied *from* TARGET; ANSI requires this.
3123
3124 Otherwise, if TEMP is not TARGET, return TEMP
3125 if it is constant (for efficiency),
3126 or if we really want the correct value. */
3127 if (!(target && GET_CODE (target) == REG
3128 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3129 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3130 && ! rtx_equal_p (temp, target)
3131 && (CONSTANT_P (temp) || want_value))
3132 dont_return_target = 1;
3133 }
3134
3135 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3136 the same as that of TARGET, adjust the constant. This is needed, for
3137 example, in case it is a CONST_DOUBLE and we want only a word-sized
3138 value. */
3139 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3140 && TREE_CODE (exp) != ERROR_MARK
3141 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3142 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3143 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3144
3145 /* If value was not generated in the target, store it there.
3146 Convert the value to TARGET's type first if necessary. */
3147
3148 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3149 {
3150 target = protect_from_queue (target, 1);
3151 if (GET_MODE (temp) != GET_MODE (target)
3152 && GET_MODE (temp) != VOIDmode)
3153 {
3154 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3155 if (dont_return_target)
3156 {
3157 /* In this case, we will return TEMP,
3158 so make sure it has the proper mode.
3159 But don't forget to store the value into TARGET. */
3160 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3161 emit_move_insn (target, temp);
3162 }
3163 else
3164 convert_move (target, temp, unsignedp);
3165 }
3166
3167 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3168 {
3169 /* Handle copying a string constant into an array.
3170 The string constant may be shorter than the array.
3171 So copy just the string's actual length, and clear the rest. */
3172 rtx size;
3173 rtx addr;
3174
3175 /* Get the size of the data type of the string,
3176 which is actually the size of the target. */
3177 size = expr_size (exp);
3178 if (GET_CODE (size) == CONST_INT
3179 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3180 emit_block_move (target, temp, size,
3181 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3182 else
3183 {
3184 /* Compute the size of the data to copy from the string. */
3185 tree copy_size
3186 = size_binop (MIN_EXPR,
3187 make_tree (sizetype, size),
3188 convert (sizetype,
3189 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3190 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3191 VOIDmode, 0);
3192 rtx label = 0;
3193
3194 /* Copy that much. */
3195 emit_block_move (target, temp, copy_size_rtx,
3196 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3197
3198 /* Figure out how much is left in TARGET that we have to clear.
3199 Do all calculations in ptr_mode. */
3200
3201 addr = XEXP (target, 0);
3202 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3203
3204 if (GET_CODE (copy_size_rtx) == CONST_INT)
3205 {
3206 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3207 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3208 }
3209 else
3210 {
3211 addr = force_reg (ptr_mode, addr);
3212 addr = expand_binop (ptr_mode, add_optab, addr,
3213 copy_size_rtx, NULL_RTX, 0,
3214 OPTAB_LIB_WIDEN);
3215
3216 size = expand_binop (ptr_mode, sub_optab, size,
3217 copy_size_rtx, NULL_RTX, 0,
3218 OPTAB_LIB_WIDEN);
3219
3220 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3221 GET_MODE (size), 0, 0);
3222 label = gen_label_rtx ();
3223 emit_jump_insn (gen_blt (label));
3224 }
3225
3226 if (size != const0_rtx)
3227 {
3228 #ifdef TARGET_MEM_FUNCTIONS
3229 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3230 addr, ptr_mode,
3231 const0_rtx, TYPE_MODE (integer_type_node),
3232 convert_to_mode (TYPE_MODE (sizetype),
3233 size,
3234 TREE_UNSIGNED (sizetype)),
3235 TYPE_MODE (sizetype));
3236 #else
3237 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3238 addr, ptr_mode,
3239 convert_to_mode (TYPE_MODE (integer_type_node),
3240 size,
3241 TREE_UNSIGNED (integer_type_node)),
3242 TYPE_MODE (integer_type_node));
3243 #endif
3244 }
3245
3246 if (label)
3247 emit_label (label);
3248 }
3249 }
3250 /* Handle calls that return values in multiple non-contiguous locations.
3251 The Irix 6 ABI has examples of this. */
3252 else if (GET_CODE (target) == PARALLEL)
3253 emit_group_load (target, temp);
3254 else if (GET_MODE (temp) == BLKmode)
3255 emit_block_move (target, temp, expr_size (exp),
3256 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3257 else
3258 emit_move_insn (target, temp);
3259 }
3260
3261 /* If we don't want a value, return NULL_RTX. */
3262 if (! want_value)
3263 return NULL_RTX;
3264
3265 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3266 ??? The latter test doesn't seem to make sense. */
3267 else if (dont_return_target && GET_CODE (temp) != MEM)
3268 return temp;
3269
3270 /* Return TARGET itself if it is a hard register. */
3271 else if (want_value && GET_MODE (target) != BLKmode
3272 && ! (GET_CODE (target) == REG
3273 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3274 return copy_to_reg (target);
3275
3276 else
3277 return target;
3278 }
3279 \f
3280 /* Return 1 if EXP just contains zeros. */
3281
3282 static int
3283 is_zeros_p (exp)
3284 tree exp;
3285 {
3286 tree elt;
3287
3288 switch (TREE_CODE (exp))
3289 {
3290 case CONVERT_EXPR:
3291 case NOP_EXPR:
3292 case NON_LVALUE_EXPR:
3293 return is_zeros_p (TREE_OPERAND (exp, 0));
3294
3295 case INTEGER_CST:
3296 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3297
3298 case COMPLEX_CST:
3299 return
3300 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3301
3302 case REAL_CST:
3303 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3304
3305 case CONSTRUCTOR:
3306 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3307 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3308 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3309 if (! is_zeros_p (TREE_VALUE (elt)))
3310 return 0;
3311
3312 return 1;
3313 }
3314
3315 return 0;
3316 }
3317
3318 /* Return 1 if EXP contains mostly (3/4) zeros. */
3319
3320 static int
3321 mostly_zeros_p (exp)
3322 tree exp;
3323 {
3324 if (TREE_CODE (exp) == CONSTRUCTOR)
3325 {
3326 int elts = 0, zeros = 0;
3327 tree elt = CONSTRUCTOR_ELTS (exp);
3328 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3329 {
3330 /* If there are no ranges of true bits, it is all zero. */
3331 return elt == NULL_TREE;
3332 }
3333 for (; elt; elt = TREE_CHAIN (elt))
3334 {
3335 /* We do not handle the case where the index is a RANGE_EXPR,
3336 so the statistic will be somewhat inaccurate.
3337 We do make a more accurate count in store_constructor itself,
3338 and since this function is only used for nested array elements,
3339 this should be close enough. */
3340 if (mostly_zeros_p (TREE_VALUE (elt)))
3341 zeros++;
3342 elts++;
3343 }
3344
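  /* Mostly zero means at least 3/4 of the elements are zero,
     i.e. zeros / elts >= 0.75, written without division.  */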
3345 return 4 * zeros >= 3 * elts;
3346 }
3347
3348 return is_zeros_p (exp);
3349 }
3350 \f
3351 /* Helper function for store_constructor.
3352 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3353 TYPE is the type of the CONSTRUCTOR, not the element type.
3354 CLEARED is as for store_constructor.
3355
3356 This provides a recursive shortcut back to store_constructor when it isn't
3357 necessary to go through store_field. This is so that we can pass through
3358 the cleared field to let store_constructor know that we may not have to
3359 clear a substructure if the outer structure has already been cleared. */
3360
3361 static void
3362 store_constructor_field (target, bitsize, bitpos,
3363 mode, exp, type, cleared)
3364 rtx target;
3365 int bitsize, bitpos;
3366 enum machine_mode mode;
3367 tree exp, type;
3368 int cleared;
3369 {
3370 if (TREE_CODE (exp) == CONSTRUCTOR
3371 && bitpos % BITS_PER_UNIT == 0
3372 /* If we have a non-zero bitpos for a register target, then we just
3373 let store_field do the bitfield handling. This is unlikely to
3374 generate unnecessary clear instructions anyway. */
3375 && (bitpos == 0 || GET_CODE (target) == MEM))
3376 {
3377 if (bitpos != 0)
3378 target = change_address (target, VOIDmode,
3379 plus_constant (XEXP (target, 0),
3380 bitpos / BITS_PER_UNIT));
3381 store_constructor (exp, target, cleared);
3382 }
3383 else
3384 store_field (target, bitsize, bitpos, mode, exp,
3385 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3386 int_size_in_bytes (type));
3387 }
3388
3389 /* Store the value of constructor EXP into the rtx TARGET.
3390 TARGET is either a REG or a MEM.
3391 CLEARED is true if TARGET is known to have been zero'd. */
3392
3393 static void
3394 store_constructor (exp, target, cleared)
3395 tree exp;
3396 rtx target;
3397 int cleared;
3398 {
3399 tree type = TREE_TYPE (exp);
3400
3401 /* We know our target cannot conflict, since safe_from_p has been called. */
3402 #if 0
3403 /* Don't try copying piece by piece into a hard register
3404 since that is vulnerable to being clobbered by EXP.
3405 Instead, construct in a pseudo register and then copy it all. */
3406 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3407 {
3408 rtx temp = gen_reg_rtx (GET_MODE (target));
3409 store_constructor (exp, temp, 0);
3410 emit_move_insn (target, temp);
3411 return;
3412 }
3413 #endif
3414
3415 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3416 || TREE_CODE (type) == QUAL_UNION_TYPE)
3417 {
3418 register tree elt;
3419
3420 /* Inform later passes that the whole union value is dead. */
3421 if (TREE_CODE (type) == UNION_TYPE
3422 || TREE_CODE (type) == QUAL_UNION_TYPE)
3423 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3424
3425 /* If we are building a static constructor into a register,
3426 set the initial value as zero so we can fold the value into
3427 a constant. But if more than one register is involved,
3428 this probably loses. */
3429 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3430 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3431 {
3432 if (! cleared)
3433 emit_move_insn (target, const0_rtx);
3434
3435 cleared = 1;
3436 }
3437
3438 /* If the constructor has fewer fields than the structure
3439 or if we are initializing the structure to mostly zeros,
3440 clear the whole structure first. */
3441 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3442 != list_length (TYPE_FIELDS (type)))
3443 || mostly_zeros_p (exp))
3444 {
3445 if (! cleared)
3446 clear_storage (target, expr_size (exp),
3447 TYPE_ALIGN (type) / BITS_PER_UNIT);
3448
3449 cleared = 1;
3450 }
3451 else
3452 /* Inform later passes that the old value is dead. */
3453 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3454
3455 /* Store each element of the constructor into
3456 the corresponding field of TARGET. */
3457
3458 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3459 {
3460 register tree field = TREE_PURPOSE (elt);
3461 register enum machine_mode mode;
3462 int bitsize;
3463 int bitpos = 0;
3464 int unsignedp;
3465 tree pos, constant = 0, offset = 0;
3466 rtx to_rtx = target;
3467
3468 /* Just ignore missing fields.
3469 We cleared the whole structure, above,
3470 if any fields are missing. */
3471 if (field == 0)
3472 continue;
3473
3474 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3475 continue;
3476
3477 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3478 unsignedp = TREE_UNSIGNED (field);
3479 mode = DECL_MODE (field);
3480 if (DECL_BIT_FIELD (field))
3481 mode = VOIDmode;
3482
3483 pos = DECL_FIELD_BITPOS (field);
3484 if (TREE_CODE (pos) == INTEGER_CST)
3485 constant = pos;
3486 else if (TREE_CODE (pos) == PLUS_EXPR
3487 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3488 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3489 else
3490 offset = pos;
3491
3492 if (constant)
3493 bitpos = TREE_INT_CST_LOW (constant);
3494
3495 if (offset)
3496 {
3497 rtx offset_rtx;
3498
3499 if (contains_placeholder_p (offset))
3500 offset = build (WITH_RECORD_EXPR, sizetype,
3501 offset, exp);
3502
3503 offset = size_binop (FLOOR_DIV_EXPR, offset,
3504 size_int (BITS_PER_UNIT));
3505
3506 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3507 if (GET_CODE (to_rtx) != MEM)
3508 abort ();
3509
3510 to_rtx
3511 = change_address (to_rtx, VOIDmode,
3512 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3513 force_reg (ptr_mode, offset_rtx)));
3514 }
3515 if (TREE_READONLY (field))
3516 {
3517 if (GET_CODE (to_rtx) == MEM)
3518 to_rtx = copy_rtx (to_rtx);
3519
3520 RTX_UNCHANGING_P (to_rtx) = 1;
3521 }
3522
3523 store_constructor_field (to_rtx, bitsize, bitpos,
3524 mode, TREE_VALUE (elt), type, cleared);
3525 }
3526 }
3527 else if (TREE_CODE (type) == ARRAY_TYPE)
3528 {
3529 register tree elt;
3530 register int i;
3531 int need_to_clear;
3532 tree domain = TYPE_DOMAIN (type);
3533 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3534 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3535 tree elttype = TREE_TYPE (type);
3536
3537 /* If the constructor has fewer elements than the array,
3538 clear the whole array first. Similarly if this is a
3539 static constructor of a non-BLKmode object. */
3540 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3541 need_to_clear = 1;
3542 else
3543 {
3544 HOST_WIDE_INT count = 0, zero_count = 0;
3545 need_to_clear = 0;
3546 /* This loop is a more accurate version of the loop in
3547 mostly_zeros_p (it handles RANGE_EXPR in an index).
3548 It is also needed to check for missing elements. */
3549 for (elt = CONSTRUCTOR_ELTS (exp);
3550 elt != NULL_TREE;
3551 elt = TREE_CHAIN (elt))
3552 {
3553 tree index = TREE_PURPOSE (elt);
3554 HOST_WIDE_INT this_node_count;
3555 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3556 {
3557 tree lo_index = TREE_OPERAND (index, 0);
3558 tree hi_index = TREE_OPERAND (index, 1);
3559 if (TREE_CODE (lo_index) != INTEGER_CST
3560 || TREE_CODE (hi_index) != INTEGER_CST)
3561 {
3562 need_to_clear = 1;
3563 break;
3564 }
3565 this_node_count = TREE_INT_CST_LOW (hi_index)
3566 - TREE_INT_CST_LOW (lo_index) + 1;
3567 }
3568 else
3569 this_node_count = 1;
3570 count += this_node_count;
3571 if (mostly_zeros_p (TREE_VALUE (elt)))
3572 zero_count += this_node_count;
3573 }
3574 /* Clear the entire array first if there are any missing elements,
3575 or if the incidence of zero elements is >= 75%. */
3576 if (count < maxelt - minelt + 1
3577 || 4 * zero_count >= 3 * count)
3578 need_to_clear = 1;
3579 }
3580 if (need_to_clear)
3581 {
3582 if (! cleared)
3583 clear_storage (target, expr_size (exp),
3584 TYPE_ALIGN (type) / BITS_PER_UNIT);
3585 cleared = 1;
3586 }
3587 else
3588 /* Inform later passes that the old value is dead. */
3589 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3590
3591 /* Store each element of the constructor into
3592 the corresponding element of TARGET, determined
3593 by counting the elements. */
3594 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3595 elt;
3596 elt = TREE_CHAIN (elt), i++)
3597 {
3598 register enum machine_mode mode;
3599 int bitsize;
3600 int bitpos;
3601 int unsignedp;
3602 tree value = TREE_VALUE (elt);
3603 tree index = TREE_PURPOSE (elt);
3604 rtx xtarget = target;
3605
3606 if (cleared && is_zeros_p (value))
3607 continue;
3608
3609 mode = TYPE_MODE (elttype);
3610 bitsize = GET_MODE_BITSIZE (mode);
3611 unsignedp = TREE_UNSIGNED (elttype);
3612
3613 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3614 {
3615 tree lo_index = TREE_OPERAND (index, 0);
3616 tree hi_index = TREE_OPERAND (index, 1);
3617 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3618 struct nesting *loop;
3619 HOST_WIDE_INT lo, hi, count;
3620 tree position;
3621
3622 /* If the range is constant and "small", unroll the loop. */
3623 if (TREE_CODE (lo_index) == INTEGER_CST
3624 && TREE_CODE (hi_index) == INTEGER_CST
3625 && (lo = TREE_INT_CST_LOW (lo_index),
3626 hi = TREE_INT_CST_LOW (hi_index),
3627 count = hi - lo + 1,
3628 (GET_CODE (target) != MEM
3629 || count <= 2
3630 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3631 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3632 <= 40 * 8))))
3633 {
3634 lo -= minelt; hi -= minelt;
3635 for (; lo <= hi; lo++)
3636 {
3637 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3638 store_constructor_field (target, bitsize, bitpos,
3639 mode, value, type, cleared);
3640 }
3641 }
3642 else
3643 {
3644 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3645 loop_top = gen_label_rtx ();
3646 loop_end = gen_label_rtx ();
3647
3648 unsignedp = TREE_UNSIGNED (domain);
3649
3650 index = build_decl (VAR_DECL, NULL_TREE, domain);
3651
3652 DECL_RTL (index) = index_r
3653 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3654 &unsignedp, 0));
3655
3656 if (TREE_CODE (value) == SAVE_EXPR
3657 && SAVE_EXPR_RTL (value) == 0)
3658 {
3659 /* Make sure value gets expanded once before the
3660 loop. */
3661 expand_expr (value, const0_rtx, VOIDmode, 0);
3662 emit_queue ();
3663 }
3664 store_expr (lo_index, index_r, 0);
3665 loop = expand_start_loop (0);
3666
3667 /* Assign value to element index. */
3668 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3669 size_int (BITS_PER_UNIT));
3670 position = size_binop (MULT_EXPR,
3671 size_binop (MINUS_EXPR, index,
3672 TYPE_MIN_VALUE (domain)),
3673 position);
3674 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3675 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3676 xtarget = change_address (target, mode, addr);
3677 if (TREE_CODE (value) == CONSTRUCTOR)
3678 store_constructor (value, xtarget, cleared);
3679 else
3680 store_expr (value, xtarget, 0);
3681
3682 expand_exit_loop_if_false (loop,
3683 build (LT_EXPR, integer_type_node,
3684 index, hi_index));
3685
3686 expand_increment (build (PREINCREMENT_EXPR,
3687 TREE_TYPE (index),
3688 index, integer_one_node), 0, 0);
3689 expand_end_loop ();
3690 emit_label (loop_end);
3691
3692 /* Needed by stupid register allocation, to extend the
3693 lifetime of pseudo-regs used by target past the end
3694 of the loop. */
3695 emit_insn (gen_rtx (USE, GET_MODE (target), target));
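 /* The insns emitted above correspond roughly to this C sketch
    (names illustrative only):

        for (index = lo_index; ; index++)
          {
            target[index - min_index] = value;
            if (! (index < hi_index))
              break;
          }
 */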
3696 }
3697 }
3698 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3699 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3700 {
3701 rtx pos_rtx, addr;
3702 tree position;
3703
3704 if (index == 0)
3705 index = size_int (i);
3706
3707 if (minelt)
3708 index = size_binop (MINUS_EXPR, index,
3709 TYPE_MIN_VALUE (domain));
3710 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3711 size_int (BITS_PER_UNIT));
3712 position = size_binop (MULT_EXPR, index, position);
3713 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3714 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3715 xtarget = change_address (target, mode, addr);
3716 store_expr (value, xtarget, 0);
3717 }
3718 else
3719 {
3720 if (index != 0)
3721 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3722 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3723 else
3724 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3725 store_constructor_field (target, bitsize, bitpos,
3726 mode, value, type, cleared);
3727 }
3728 }
3729 }
3730 /* set constructor assignments */
3731 else if (TREE_CODE (type) == SET_TYPE)
3732 {
3733 tree elt = CONSTRUCTOR_ELTS (exp);
3734 rtx xtarget = XEXP (target, 0);
3735 int set_word_size = TYPE_ALIGN (type);
3736 int nbytes = int_size_in_bytes (type), nbits;
3737 tree domain = TYPE_DOMAIN (type);
3738 tree domain_min, domain_max, bitlength;
3739
3740 /* The default implementation strategy is to extract the constant
3741 parts of the constructor, use that to initialize the target,
3742 and then "or" in whatever non-constant ranges we need in addition.
3743
3744 If a large set is all zero or all ones, it is
3745 probably better to set it using memset (if available) or bzero.
3746 Also, if a large set has just a single range, it may be better
3747 to first clear the set (using bzero/memset) and then set the
3748 bits we want. */
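 /* Illustration (hypothetical Pascal-style set): over a domain of
    0..7, the constructor [1..3, 6] denotes the bit pattern 01001110
    (bit 0 rightmost).  The constant range 1..3 can be emitted as an
    immediate word; a non-constant range would be set afterwards by
    calling __setbits.  */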
3749
3750 /* Check for all zeros. */
3751 if (elt == NULL_TREE)
3752 {
3753 if (!cleared)
3754 clear_storage (target, expr_size (exp),
3755 TYPE_ALIGN (type) / BITS_PER_UNIT);
3756 return;
3757 }
3758
3759 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3760 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3761 bitlength = size_binop (PLUS_EXPR,
3762 size_binop (MINUS_EXPR, domain_max, domain_min),
3763 size_one_node);
3764
3765 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3766 abort ();
3767 nbits = TREE_INT_CST_LOW (bitlength);
3768
3769 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3770 are "complicated" (more than one range), initialize (the
3771 constant parts) by copying from a constant. */
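 /* Example (assuming 8-bit set words and !BYTES_BIG_ENDIAN): for a
    5-bit set whose members are {0, 2}, bit_buffer holds {1,0,1,0,0},
    so a single word with value 5 is moved into the target.  */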
3772 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3773 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3774 {
3775 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3776 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3777 char *bit_buffer = (char *) alloca (nbits);
3778 HOST_WIDE_INT word = 0;
3779 int bit_pos = 0;
3780 int ibit = 0;
3781 int offset = 0; /* In bytes from beginning of set. */
3782 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3783 for (;;)
3784 {
3785 if (bit_buffer[ibit])
3786 {
3787 if (BYTES_BIG_ENDIAN)
3788 word |= (1 << (set_word_size - 1 - bit_pos));
3789 else
3790 word |= 1 << bit_pos;
3791 }
3792 bit_pos++; ibit++;
3793 if (bit_pos >= set_word_size || ibit == nbits)
3794 {
3795 if (word != 0 || ! cleared)
3796 {
3797 rtx datum = GEN_INT (word);
3798 rtx to_rtx;
3799 /* The assumption here is that it is safe to use
3800 XEXP if the set is multi-word, but not if
3801 it's single-word. */
3802 if (GET_CODE (target) == MEM)
3803 {
3804 to_rtx = plus_constant (XEXP (target, 0), offset);
3805 to_rtx = change_address (target, mode, to_rtx);
3806 }
3807 else if (offset == 0)
3808 to_rtx = target;
3809 else
3810 abort ();
3811 emit_move_insn (to_rtx, datum);
3812 }
3813 if (ibit == nbits)
3814 break;
3815 word = 0;
3816 bit_pos = 0;
3817 offset += set_word_size / BITS_PER_UNIT;
3818 }
3819 }
3820 }
3821 else if (!cleared)
3822 {
3823 /* Don't bother clearing storage if the set is all ones. */
3824 if (TREE_CHAIN (elt) != NULL_TREE
3825 || (TREE_PURPOSE (elt) == NULL_TREE
3826 ? nbits != 1
3827 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3828 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3829 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3830 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3831 != nbits))))
3832 clear_storage (target, expr_size (exp),
3833 TYPE_ALIGN (type) / BITS_PER_UNIT);
3834 }
3835
3836 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3837 {
3838 /* start of range of element or NULL */
3839 tree startbit = TREE_PURPOSE (elt);
3840 /* end of range of element, or element value */
3841 tree endbit = TREE_VALUE (elt);
3842 HOST_WIDE_INT startb, endb;
3843 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3844
3845 bitlength_rtx = expand_expr (bitlength,
3846 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3847
3848 /* handle non-range tuple element like [ expr ] */
3849 if (startbit == NULL_TREE)
3850 {
3851 startbit = save_expr (endbit);
3852 endbit = startbit;
3853 }
3854 startbit = convert (sizetype, startbit);
3855 endbit = convert (sizetype, endbit);
3856 if (! integer_zerop (domain_min))
3857 {
3858 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3859 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3860 }
3861 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3862 EXPAND_CONST_ADDRESS);
3863 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3864 EXPAND_CONST_ADDRESS);
3865
3866 if (REG_P (target))
3867 {
3868 targetx = assign_stack_temp (GET_MODE (target),
3869 GET_MODE_SIZE (GET_MODE (target)),
3870 0);
3871 emit_move_insn (targetx, target);
3872 }
3873 else if (GET_CODE (target) == MEM)
3874 targetx = target;
3875 else
3876 abort ();
3877
3878 #ifdef TARGET_MEM_FUNCTIONS
3879 /* Optimization: If startbit and endbit are
3880 constants divisible by BITS_PER_UNIT,
3881 call memset instead. */
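 /* E.g. (assuming 8-bit units): the range [8..23] gives startb == 8
    and endb == 24, both multiples of BITS_PER_UNIT, so we can memset
    the two bytes at offset 1 to all-ones instead of calling
    __setbits.  */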
3882 if (TREE_CODE (startbit) == INTEGER_CST
3883 && TREE_CODE (endbit) == INTEGER_CST
3884 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3885 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3886 {
3887 emit_library_call (memset_libfunc, 0,
3888 VOIDmode, 3,
3889 plus_constant (XEXP (targetx, 0),
3890 startb / BITS_PER_UNIT),
3891 Pmode,
3892 constm1_rtx, TYPE_MODE (integer_type_node),
3893 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3894 TYPE_MODE (sizetype));
3895 }
3896 else
3897 #endif
3898 {
3899 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3900 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3901 bitlength_rtx, TYPE_MODE (sizetype),
3902 startbit_rtx, TYPE_MODE (sizetype),
3903 endbit_rtx, TYPE_MODE (sizetype));
3904 }
3905 if (REG_P (target))
3906 emit_move_insn (target, targetx);
3907 }
3908 }
3909
3910 else
3911 abort ();
3912 }
3913
3914 /* Store the value of EXP (an expression tree)
3915 into a subfield of TARGET which has mode MODE and occupies
3916 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3917 If MODE is VOIDmode, it means that we are storing into a bit-field.
3918
3919 If VALUE_MODE is VOIDmode, return nothing in particular.
3920 UNSIGNEDP is not used in this case.
3921
3922 Otherwise, return an rtx for the value stored. This rtx
3923 has mode VALUE_MODE if that is convenient to do.
3924 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3925
3926 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3927 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3928
3929 static rtx
3930 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3931 unsignedp, align, total_size)
3932 rtx target;
3933 int bitsize, bitpos;
3934 enum machine_mode mode;
3935 tree exp;
3936 enum machine_mode value_mode;
3937 int unsignedp;
3938 int align;
3939 int total_size;
3940 {
3941 HOST_WIDE_INT width_mask = 0;
3942
3943 if (bitsize < HOST_BITS_PER_WIDE_INT)
3944 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3945
3946 /* If we are storing into an unaligned field of an aligned union that is
3947 in a register, we may have the mode of TARGET being an integer mode but
3948 MODE == BLKmode. In that case, get an aligned object whose size and
3949 alignment are the same as TARGET and store TARGET into it (we can avoid
3950 the store if the field being stored is the entire width of TARGET). Then
3951 call ourselves recursively to store the field into a BLKmode version of
3952 that object. Finally, load from the object into TARGET. This is not
3953 very efficient in general, but should only be slightly more expensive
3954 than the otherwise-required unaligned accesses. Perhaps this can be
3955 cleaned up later. */
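 /* For instance (hypothetical): storing a 3-byte BLKmode field of a
    union that lives in an SImode register.  We spill the register to
    a stack temporary, store the field into a BLKmode view of that
    temporary, and then reload the register from it.  */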
3956
3957 if (mode == BLKmode
3958 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3959 {
3960 rtx object = assign_stack_temp (GET_MODE (target),
3961 GET_MODE_SIZE (GET_MODE (target)), 0);
3962 rtx blk_object = copy_rtx (object);
3963
3964 MEM_IN_STRUCT_P (object) = 1;
3965 MEM_IN_STRUCT_P (blk_object) = 1;
3966 PUT_MODE (blk_object, BLKmode);
3967
3968 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3969 emit_move_insn (object, target);
3970
3971 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3972 align, total_size);
3973
3974 /* Even though we aren't returning target, we need to
3975 give it the updated value. */
3976 emit_move_insn (target, object);
3977
3978 return blk_object;
3979 }
3980
3981 /* If the structure is in a register or if the component
3982 is a bit field, we cannot use addressing to access it.
3983 Use bit-field techniques or SUBREG to store in it. */
3984
3985 if (mode == VOIDmode
3986 || (mode != BLKmode && ! direct_store[(int) mode])
3987 || GET_CODE (target) == REG
3988 || GET_CODE (target) == SUBREG
3989 /* If the field isn't aligned enough to store as an ordinary memref,
3990 store it as a bit field. */
3991 || (SLOW_UNALIGNED_ACCESS
3992 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3993 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3994 {
3995 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3996
3997 /* If BITSIZE is narrower than the size of the type of EXP
3998 we will be narrowing TEMP. Normally, what's wanted are the
3999 low-order bits. However, if EXP's type is a record and this is a
4000 big-endian machine, we want the upper BITSIZE bits. */
4001 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4002 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4003 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4004 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4005 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4006 - bitsize),
4007 temp, 1);
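 /* E.g.: on a big-endian target, narrowing a 32-bit TEMP that holds
    a record to an 8-bit field means keeping the most significant
    byte, hence the shift right by 32 - 8 = 24 bits above.  */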
4008
4009 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4010 MODE. */
4011 if (mode != VOIDmode && mode != BLKmode
4012 && mode != TYPE_MODE (TREE_TYPE (exp)))
4013 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4014
4015 /* If the modes of TARGET and TEMP are both BLKmode, both
4016 must be in memory and BITPOS must be aligned on a byte
4017 boundary. If so, we simply do a block copy. */
4018 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4019 {
4020 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4021 || bitpos % BITS_PER_UNIT != 0)
4022 abort ();
4023
4024 target = change_address (target, VOIDmode,
4025 plus_constant (XEXP (target, 0),
4026 bitpos / BITS_PER_UNIT));
4027
4028 emit_block_move (target, temp,
4029 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4030 / BITS_PER_UNIT),
4031 1);
4032
4033 return value_mode == VOIDmode ? const0_rtx : target;
4034 }
4035
4036 /* Store the value in the bitfield. */
4037 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4038 if (value_mode != VOIDmode)
4039 {
4040 /* The caller wants an rtx for the value. */
4041 /* If possible, avoid refetching from the bitfield itself. */
4042 if (width_mask != 0
4043 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4044 {
4045 tree count;
4046 enum machine_mode tmode;
4047
4048 if (unsignedp)
4049 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4050 tmode = GET_MODE (temp);
4051 if (tmode == VOIDmode)
4052 tmode = value_mode;
4053 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4054 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4055 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4056 }
4057 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4058 NULL_RTX, value_mode, 0, align,
4059 total_size);
4060 }
4061 return const0_rtx;
4062 }
4063 else
4064 {
4065 rtx addr = XEXP (target, 0);
4066 rtx to_rtx;
4067
4068 /* If a value is wanted, it must be the lhs;
4069 so make the address stable for multiple use. */
4070
4071 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4072 && ! CONSTANT_ADDRESS_P (addr)
4073 /* A frame-pointer reference is already stable. */
4074 && ! (GET_CODE (addr) == PLUS
4075 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4076 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4077 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4078 addr = copy_to_reg (addr);
4079
4080 /* Now build a reference to just the desired component. */
4081
4082 to_rtx = copy_rtx (change_address (target, mode,
4083 plus_constant (addr,
4084 (bitpos
4085 / BITS_PER_UNIT))));
4086 MEM_IN_STRUCT_P (to_rtx) = 1;
4087
4088 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4089 }
4090 }
4091 \f
4092 /* Return true if any object containing the innermost array is an unaligned
4093 packed structure field. */
4094
4095 static int
4096 get_inner_unaligned_p (exp)
4097 tree exp;
4098 {
4099 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4100
4101 while (1)
4102 {
4103 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4104 {
4105 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4106 < needed_alignment)
4107 return 1;
4108 }
4109 else if (TREE_CODE (exp) != ARRAY_REF
4110 && TREE_CODE (exp) != NON_LVALUE_EXPR
4111 && ! ((TREE_CODE (exp) == NOP_EXPR
4112 || TREE_CODE (exp) == CONVERT_EXPR)
4113 && (TYPE_MODE (TREE_TYPE (exp))
4114 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4115 break;
4116
4117 exp = TREE_OPERAND (exp, 0);
4118 }
4119
4120 return 0;
4121 }
4122
4123 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4124 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4125 ARRAY_REFs and find the ultimate containing object, which we return.
4126
4127 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4128 bit position, and *PUNSIGNEDP to the signedness of the field.
4129 If the position of the field is variable, we store a tree
4130 giving the variable offset (in units) in *POFFSET.
4131 This offset is in addition to the bit position.
4132 If the position is not variable, we store 0 in *POFFSET.
4133 We set *PALIGNMENT to the alignment in bytes of the address that will be
4134 computed. This is the alignment of the thing we return if *POFFSET
4135 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4136
4137 If any of the extraction expressions is volatile,
4138 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4139
4140 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4141 is a mode that can be used to access the field. In that case, *PBITSIZE
4142 is redundant.
4143
4144 If the field describes a variable-sized object, *PMODE is set to
4145 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4146 this case, but the address of the object can be found. */
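 /* For example (assuming a typical 32-bit target): for the reference
    S.Y in

        struct { int x; short y; } s;

    this returns the VAR_DECL for S, with *PBITSIZE == 16,
    *PBITPOS == 32, *POFFSET == 0 and *PMODE == HImode.  */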
4147
4148 tree
4149 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4150 punsignedp, pvolatilep, palignment)
4151 tree exp;
4152 int *pbitsize;
4153 int *pbitpos;
4154 tree *poffset;
4155 enum machine_mode *pmode;
4156 int *punsignedp;
4157 int *pvolatilep;
4158 int *palignment;
4159 {
4160 tree orig_exp = exp;
4161 tree size_tree = 0;
4162 enum machine_mode mode = VOIDmode;
4163 tree offset = integer_zero_node;
4164 int alignment = BIGGEST_ALIGNMENT;
4165
4166 if (TREE_CODE (exp) == COMPONENT_REF)
4167 {
4168 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4169 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4170 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4171 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4172 }
4173 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4174 {
4175 size_tree = TREE_OPERAND (exp, 1);
4176 *punsignedp = TREE_UNSIGNED (exp);
4177 }
4178 else
4179 {
4180 mode = TYPE_MODE (TREE_TYPE (exp));
4181 *pbitsize = GET_MODE_BITSIZE (mode);
4182 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4183 }
4184
4185 if (size_tree)
4186 {
4187 if (TREE_CODE (size_tree) != INTEGER_CST)
4188 mode = BLKmode, *pbitsize = -1;
4189 else
4190 *pbitsize = TREE_INT_CST_LOW (size_tree);
4191 }
4192
4193 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4194 and find the ultimate containing object. */
4195
4196 *pbitpos = 0;
4197
4198 while (1)
4199 {
4200 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4201 {
4202 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4203 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4204 : TREE_OPERAND (exp, 2));
4205 tree constant = integer_zero_node, var = pos;
4206
4207 /* If this field hasn't been filled in yet, don't go
4208 past it. This should only happen when folding expressions
4209 made during type construction. */
4210 if (pos == 0)
4211 break;
4212
4213 /* Assume here that the offset is a multiple of a unit.
4214 If not, there should be an explicitly added constant. */
4215 if (TREE_CODE (pos) == PLUS_EXPR
4216 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4217 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4218 else if (TREE_CODE (pos) == INTEGER_CST)
4219 constant = pos, var = integer_zero_node;
4220
4221 *pbitpos += TREE_INT_CST_LOW (constant);
4222 offset = size_binop (PLUS_EXPR, offset,
4223 size_binop (EXACT_DIV_EXPR, var,
4224 size_int (BITS_PER_UNIT)));
4225 }
4226
4227 else if (TREE_CODE (exp) == ARRAY_REF)
4228 {
4229 /* This code is based on the code in case ARRAY_REF in expand_expr
4230 below. We assume here that the size of an array element is
4231 always an integral multiple of BITS_PER_UNIT. */
4232
4233 tree index = TREE_OPERAND (exp, 1);
4234 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4235 tree low_bound
4236 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4237 tree index_type = TREE_TYPE (index);
4238
4239 if (! integer_zerop (low_bound))
4240 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4241
4242 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4243 {
4244 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4245 index);
4246 index_type = TREE_TYPE (index);
4247 }
4248
4249 index = fold (build (MULT_EXPR, index_type, index,
4250 convert (index_type,
4251 TYPE_SIZE (TREE_TYPE (exp)))));
4252
4253 if (TREE_CODE (index) == INTEGER_CST
4254 && TREE_INT_CST_HIGH (index) == 0)
4255 *pbitpos += TREE_INT_CST_LOW (index);
4256 else
4257 offset = size_binop (PLUS_EXPR, offset,
4258 size_binop (FLOOR_DIV_EXPR, index,
4259 size_int (BITS_PER_UNIT)));
4260 }
4261 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4262 && ! ((TREE_CODE (exp) == NOP_EXPR
4263 || TREE_CODE (exp) == CONVERT_EXPR)
4264 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4265 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4266 != UNION_TYPE))
4267 && (TYPE_MODE (TREE_TYPE (exp))
4268 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4269 break;
4270
4271 /* If any reference in the chain is volatile, the effect is volatile. */
4272 if (TREE_THIS_VOLATILE (exp))
4273 *pvolatilep = 1;
4274
4275 /* If the offset is non-constant already, then we can't assume any
4276 alignment more than the alignment here. */
4277 if (! integer_zerop (offset))
4278 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4279
4280 exp = TREE_OPERAND (exp, 0);
4281 }
4282
4283 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4284 alignment = MIN (alignment, DECL_ALIGN (exp));
4285 else if (TREE_TYPE (exp) != 0)
4286 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4287
4288 if (integer_zerop (offset))
4289 offset = 0;
4290
4291 if (offset != 0 && contains_placeholder_p (offset))
4292 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4293
4294 *pmode = mode;
4295 *poffset = offset;
4296 *palignment = alignment / BITS_PER_UNIT;
4297 return exp;
4298 }
4299 \f
4300 /* Given an rtx VALUE that may contain additions and multiplications,
4301 return an equivalent value that just refers to a register or memory.
4302 This is done by generating instructions to perform the arithmetic
4303 and returning a pseudo-register containing the value.
4304
4305 The returned value may be a REG, SUBREG, MEM or constant. */
4306
4307 rtx
4308 force_operand (value, target)
4309 rtx value, target;
4310 {
4311 register optab binoptab = 0;
4312 /* Use a temporary to force order of execution of calls to
4313 `force_operand'. */
4314 rtx tmp;
4315 register rtx op2;
4316 /* Use subtarget as the target for operand 0 of a binary operation. */
4317 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4318
4319 if (GET_CODE (value) == PLUS)
4320 binoptab = add_optab;
4321 else if (GET_CODE (value) == MINUS)
4322 binoptab = sub_optab;
4323 else if (GET_CODE (value) == MULT)
4324 {
4325 op2 = XEXP (value, 1);
4326 if (!CONSTANT_P (op2)
4327 && !(GET_CODE (op2) == REG && op2 != subtarget))
4328 subtarget = 0;
4329 tmp = force_operand (XEXP (value, 0), subtarget);
4330 return expand_mult (GET_MODE (value), tmp,
4331 force_operand (op2, NULL_RTX),
4332 target, 0);
4333 }
4334
4335 if (binoptab)
4336 {
4337 op2 = XEXP (value, 1);
4338 if (!CONSTANT_P (op2)
4339 && !(GET_CODE (op2) == REG && op2 != subtarget))
4340 subtarget = 0;
4341 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4342 {
4343 binoptab = add_optab;
4344 op2 = negate_rtx (GET_MODE (value), op2);
4345 }
4346
4347 /* Check for an addition with OP2 a constant integer and our first
4348 operand a PLUS of a virtual register and something else. In that
4349 case, we want to emit the sum of the virtual register and the
4350 constant first and then add the other value. This allows virtual
4351 register instantiation to simply modify the constant rather than
4352 creating another one around this addition. */
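 /* E.g.: given (plus (plus (reg virtual-stack-vars) (reg R))
    (const_int 4)), we first emit virtual-stack-vars + 4, so that
    instantiation can fold the 4 into its offset, and then add R to
    the result.  */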
4353 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4354 && GET_CODE (XEXP (value, 0)) == PLUS
4355 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4356 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4357 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4358 {
4359 rtx temp = expand_binop (GET_MODE (value), binoptab,
4360 XEXP (XEXP (value, 0), 0), op2,
4361 subtarget, 0, OPTAB_LIB_WIDEN);
4362 return expand_binop (GET_MODE (value), binoptab, temp,
4363 force_operand (XEXP (XEXP (value, 0), 1), 0),
4364 target, 0, OPTAB_LIB_WIDEN);
4365 }
4366
4367 tmp = force_operand (XEXP (value, 0), subtarget);
4368 /* We give UNSIGNEDP = 0 to expand_binop because the only
4369 operations we are expanding here are signed ones. */
4370 return expand_binop (GET_MODE (value), binoptab, tmp,
4371 force_operand (op2, NULL_RTX),
4372 target, 0, OPTAB_LIB_WIDEN);
4373 }
4374 return value;
4375 }
4376 \f
4377 /* Subroutine of expand_expr:
4378 save the non-copied parts (LIST) of an expr (LHS), and return a list
4379 which can restore these values to their previous values,
4380 should something modify their storage. */
4381
4382 static tree
4383 save_noncopied_parts (lhs, list)
4384 tree lhs;
4385 tree list;
4386 {
4387 tree tail;
4388 tree parts = 0;
4389
4390 for (tail = list; tail; tail = TREE_CHAIN (tail))
4391 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4392 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4393 else
4394 {
4395 tree part = TREE_VALUE (tail);
4396 tree part_type = TREE_TYPE (part);
4397 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4398 rtx target = assign_temp (part_type, 0, 1, 1);
4399 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4400 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4401 parts = tree_cons (to_be_saved,
4402 build (RTL_EXPR, part_type, NULL_TREE,
4403 (tree) target),
4404 parts);
4405 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4406 }
4407 return parts;
4408 }
4409
4410 /* Subroutine of expand_expr:
4411 record the non-copied parts (LIST) of an expr (LHS), and return a list
4412 which specifies the initial values of these parts. */
4413
4414 static tree
4415 init_noncopied_parts (lhs, list)
4416 tree lhs;
4417 tree list;
4418 {
4419 tree tail;
4420 tree parts = 0;
4421
4422 for (tail = list; tail; tail = TREE_CHAIN (tail))
4423 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4424 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4425 else
4426 {
4427 tree part = TREE_VALUE (tail);
4428 tree part_type = TREE_TYPE (part);
4429 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4430 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4431 }
4432 return parts;
4433 }
4434
4435 /* Subroutine of expand_expr: return nonzero iff there is no way that
4436 EXP can reference X, which is being modified. */
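 /* For instance: if X is the register holding A, then the expression
    B + 1 is safe from X, but A itself is not; and if X is a MEM, any
    INDIRECT_REF is treated as a possible alias and hence unsafe.  */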
4437
4438 static int
4439 safe_from_p (x, exp)
4440 rtx x;
4441 tree exp;
4442 {
4443 rtx exp_rtl = 0;
4444 int i, nops;
4445
4446 if (x == 0
4447 /* If EXP has varying size, we MUST use a target since we currently
4448 have no way of allocating temporaries of variable size
4449 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4450 So we assume here that something at a higher level has prevented a
4451 clash. This is somewhat bogus, but the best we can do. Only
4452 do this when X is BLKmode. */
4453 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4454 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4455 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4456 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4457 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4458 != INTEGER_CST)
4459 && GET_MODE (x) == BLKmode))
4460 return 1;
4461
4462 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4463 find the underlying pseudo. */
4464 if (GET_CODE (x) == SUBREG)
4465 {
4466 x = SUBREG_REG (x);
4467 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4468 return 0;
4469 }
4470
4471 /* If X is a location in the outgoing argument area, it is always safe. */
4472 if (GET_CODE (x) == MEM
4473 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4474 || (GET_CODE (XEXP (x, 0)) == PLUS
4475 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4476 return 1;
4477
4478 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4479 {
4480 case 'd':
4481 exp_rtl = DECL_RTL (exp);
4482 break;
4483
4484 case 'c':
4485 return 1;
4486
4487 case 'x':
4488 if (TREE_CODE (exp) == TREE_LIST)
4489 return ((TREE_VALUE (exp) == 0
4490 || safe_from_p (x, TREE_VALUE (exp)))
4491 && (TREE_CHAIN (exp) == 0
4492 || safe_from_p (x, TREE_CHAIN (exp))));
4493 else
4494 return 0;
4495
4496 case '1':
4497 return safe_from_p (x, TREE_OPERAND (exp, 0));
4498
4499 case '2':
4500 case '<':
4501 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4502 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4503
4504 case 'e':
4505 case 'r':
4506 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4507 the expression. If it is set, we conflict iff we are that rtx or
4508 both are in memory. Otherwise, we check all operands of the
4509 expression recursively. */
4510
4511 switch (TREE_CODE (exp))
4512 {
4513 case ADDR_EXPR:
4514 return (staticp (TREE_OPERAND (exp, 0))
4515 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4516
4517 case INDIRECT_REF:
4518 if (GET_CODE (x) == MEM)
4519 return 0;
4520 break;
4521
4522 case CALL_EXPR:
4523 exp_rtl = CALL_EXPR_RTL (exp);
4524 if (exp_rtl == 0)
4525 {
4526 /* Assume that the call will clobber all hard registers and
4527 all of memory. */
4528 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4529 || GET_CODE (x) == MEM)
4530 return 0;
4531 }
4532
4533 break;
4534
4535 case RTL_EXPR:
4536 /* If a sequence exists, we would have to scan every instruction
4537 in the sequence to see if it was safe. This is probably not
4538 worthwhile. */
4539 if (RTL_EXPR_SEQUENCE (exp))
4540 return 0;
4541
4542 exp_rtl = RTL_EXPR_RTL (exp);
4543 break;
4544
4545 case WITH_CLEANUP_EXPR:
4546 exp_rtl = RTL_EXPR_RTL (exp);
4547 break;
4548
4549 case CLEANUP_POINT_EXPR:
4550 return safe_from_p (x, TREE_OPERAND (exp, 0));
4551
4552 case SAVE_EXPR:
4553 exp_rtl = SAVE_EXPR_RTL (exp);
4554 break;
4555
4556 case BIND_EXPR:
4557 /* The only operand we look at is operand 1. The rest aren't
4558 part of the expression. */
4559 return safe_from_p (x, TREE_OPERAND (exp, 1));
4560
4561 case METHOD_CALL_EXPR:
4562 /* This takes an rtx argument, but shouldn't appear here. */
4563 abort ();
4564 }
4565
4566 /* If we have an rtx, we do not need to scan our operands. */
4567 if (exp_rtl)
4568 break;
4569
4570 nops = tree_code_length[(int) TREE_CODE (exp)];
4571 for (i = 0; i < nops; i++)
4572 if (TREE_OPERAND (exp, i) != 0
4573 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4574 return 0;
4575 }
4576
4577 /* If we have an rtl, find any enclosed object. Then see if we conflict
4578 with it. */
4579 if (exp_rtl)
4580 {
4581 if (GET_CODE (exp_rtl) == SUBREG)
4582 {
4583 exp_rtl = SUBREG_REG (exp_rtl);
4584 if (GET_CODE (exp_rtl) == REG
4585 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4586 return 0;
4587 }
4588
4589 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4590 are memory and EXP is not readonly. */
4591 return ! (rtx_equal_p (x, exp_rtl)
4592 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4593 && ! TREE_READONLY (exp)));
4594 }
4595
4596 /* If we reach here, it is safe. */
4597 return 1;
4598 }
4599
4600 /* Subroutine of expand_expr: return nonzero iff EXP is an
4601 expression whose type is statically determinable. */
4602
4603 static int
4604 fixed_type_p (exp)
4605 tree exp;
4606 {
4607 if (TREE_CODE (exp) == PARM_DECL
4608 || TREE_CODE (exp) == VAR_DECL
4609 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4610 || TREE_CODE (exp) == COMPONENT_REF
4611 || TREE_CODE (exp) == ARRAY_REF)
4612 return 1;
4613 return 0;
4614 }
4615
4616 /* Subroutine of expand_expr: return rtx if EXP is a
4617 variable or parameter; else return 0. */
4618
4619 static rtx
4620 var_rtx (exp)
4621 tree exp;
4622 {
4623 STRIP_NOPS (exp);
4624 switch (TREE_CODE (exp))
4625 {
4626 case PARM_DECL:
4627 case VAR_DECL:
4628 return DECL_RTL (exp);
4629 default:
4630 return 0;
4631 }
4632 }
4633 \f
4634 /* expand_expr: generate code for computing expression EXP.
4635 An rtx for the computed value is returned. The value is never null.
4636 In the case of a void EXP, const0_rtx is returned.
4637
4638 The value may be stored in TARGET if TARGET is nonzero.
4639 TARGET is just a suggestion; callers must assume that
4640 the rtx returned may not be the same as TARGET.
4641
4642 If TARGET is CONST0_RTX, it means that the value will be ignored.
4643
4644 If TMODE is not VOIDmode, it suggests generating the
4645 result in mode TMODE. But this is done only when convenient.
4646 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4647 TMODE is just a suggestion; callers must assume that
4648 the rtx returned may not have mode TMODE.
4649
4650 Note that TARGET may have neither TMODE nor MODE. In that case, it
4651 probably will not be used.
4652
4653 If MODIFIER is EXPAND_SUM then when EXP is an addition
4654 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4655 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4656 products as above, or REG or MEM, or constant.
4657 Ordinarily in such cases we would output mul or add instructions
4658 and then return a pseudo reg containing the sum.
4659
4660 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4661 it also marks a label as absolutely required (it can't be dead).
4662 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4663 This is used for outputting expressions used in initializers.
4664
4665 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4666 with a constant address even if that address is not normally legitimate.
4667 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
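 /* E.g.: with EXPAND_SUM, expanding A + I * 4 may simply return
    (plus (reg A) (mult (reg I) (const_int 4)))
    rather than emitting add/mult insns and returning a fresh pseudo.  */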
4668
4669 rtx
4670 expand_expr (exp, target, tmode, modifier)
4671 register tree exp;
4672 rtx target;
4673 enum machine_mode tmode;
4674 enum expand_modifier modifier;
4675 {
4676 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4677 This is static so it will be accessible to our recursive callees. */
4678 static tree placeholder_list = 0;
4679 register rtx op0, op1, temp;
4680 tree type = TREE_TYPE (exp);
4681 int unsignedp = TREE_UNSIGNED (type);
4682 register enum machine_mode mode = TYPE_MODE (type);
4683 register enum tree_code code = TREE_CODE (exp);
4684 optab this_optab;
4685 /* Use subtarget as the target for operand 0 of a binary operation. */
4686 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4687 rtx original_target = target;
4688 /* Maybe defer this until sure not doing bytecode? */
4689 int ignore = (target == const0_rtx
4690 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4691 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4692 || code == COND_EXPR)
4693 && TREE_CODE (type) == VOID_TYPE));
4694 tree context;
4695
4696
4697 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4698 {
4699 bc_expand_expr (exp);
4700 return NULL;
4701 }
4702
4703 /* Don't use hard regs as subtargets, because the combiner
4704 can only handle pseudo regs. */
4705 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4706 subtarget = 0;
4707 /* Avoid subtargets inside loops,
4708 since they hide some invariant expressions. */
4709 if (preserve_subexpressions_p ())
4710 subtarget = 0;
4711
4712 /* If we are going to ignore this result, we need only do something
4713 if there is a side-effect somewhere in the expression. If there
4714 is, short-circuit the most common cases here. Note that we must
4715 not call expand_expr with anything but const0_rtx in case this
4716 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4717
4718 if (ignore)
4719 {
4720 if (! TREE_SIDE_EFFECTS (exp))
4721 return const0_rtx;
4722
4723 /* Ensure we reference a volatile object even if value is ignored. */
4724 if (TREE_THIS_VOLATILE (exp)
4725 && TREE_CODE (exp) != FUNCTION_DECL
4726 && mode != VOIDmode && mode != BLKmode)
4727 {
4728 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4729 if (GET_CODE (temp) == MEM)
4730 temp = copy_to_reg (temp);
4731 return const0_rtx;
4732 }
4733
4734 if (TREE_CODE_CLASS (code) == '1')
4735 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4736 VOIDmode, modifier);
4737 else if (TREE_CODE_CLASS (code) == '2'
4738 || TREE_CODE_CLASS (code) == '<')
4739 {
4740 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4741 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4742 return const0_rtx;
4743 }
4744 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4745 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4746 /* If the second operand has no side effects, just evaluate
4747 the first. */
4748 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4749 VOIDmode, modifier);
4750
4751 target = 0;
4752 }
4753
4754 /* If we will do cse, generate all results into pseudo registers
4755 since 1) that allows cse to find more things
4756 and 2) otherwise cse could produce an insn the machine
4757 cannot support. */
4758
4759 if (! cse_not_expected && mode != BLKmode && target
4760 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4761 target = subtarget;
4762
4763 switch (code)
4764 {
4765 case LABEL_DECL:
4766 {
4767 tree function = decl_function_context (exp);
4768 /* Handle using a label in a containing function. */
4769 if (function != current_function_decl && function != 0)
4770 {
4771 struct function *p = find_function_data (function);
4772 /* Allocate in the memory associated with the function
4773 that the label is in. */
4774 push_obstacks (p->function_obstack,
4775 p->function_maybepermanent_obstack);
4776
4777 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4778 label_rtx (exp), p->forced_labels);
4779 pop_obstacks ();
4780 }
4781 else if (modifier == EXPAND_INITIALIZER)
4782 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4783 label_rtx (exp), forced_labels);
4784 temp = gen_rtx (MEM, FUNCTION_MODE,
4785 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4786 if (function != current_function_decl && function != 0)
4787 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4788 return temp;
4789 }
4790
4791 case PARM_DECL:
4792 if (DECL_RTL (exp) == 0)
4793 {
4794 error_with_decl (exp, "prior parameter's size depends on `%s'");
4795 return CONST0_RTX (mode);
4796 }
4797
4798 /* ... fall through ... */
4799
4800 case VAR_DECL:
4801 /* If a static var's type was incomplete when the decl was written,
4802 but the type is complete now, lay out the decl now. */
4803 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4804 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4805 {
4806 push_obstacks_nochange ();
4807 end_temporary_allocation ();
4808 layout_decl (exp, 0);
4809 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4810 pop_obstacks ();
4811 }
4812
4813 /* ... fall through ... */
4814
4815 case FUNCTION_DECL:
4816 case RESULT_DECL:
4817 if (DECL_RTL (exp) == 0)
4818 abort ();
4819
4820 /* Ensure the variable is marked as used even if it doesn't go through
4821 a parser. If it hasn't been used yet, write out an external
4822 definition. */
4823 if (! TREE_USED (exp))
4824 {
4825 assemble_external (exp);
4826 TREE_USED (exp) = 1;
4827 }
4828
4829 /* Show we haven't gotten RTL for this yet. */
4830 temp = 0;
4831
4832 /* Handle variables inherited from containing functions. */
4833 context = decl_function_context (exp);
4834
4835 /* We treat inline_function_decl as an alias for the current function
4836 because that is the inline function whose vars, types, etc.
4837 are being merged into the current function.
4838 See expand_inline_function. */
4839
4840 if (context != 0 && context != current_function_decl
4841 && context != inline_function_decl
4842 /* If var is static, we don't need a static chain to access it. */
4843 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4844 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4845 {
4846 rtx addr;
4847
4848 /* Mark as non-local and addressable. */
4849 DECL_NONLOCAL (exp) = 1;
4850 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4851 abort ();
4852 mark_addressable (exp);
4853 if (GET_CODE (DECL_RTL (exp)) != MEM)
4854 abort ();
4855 addr = XEXP (DECL_RTL (exp), 0);
4856 if (GET_CODE (addr) == MEM)
4857 addr = gen_rtx (MEM, Pmode,
4858 fix_lexical_addr (XEXP (addr, 0), exp));
4859 else
4860 addr = fix_lexical_addr (addr, exp);
4861 temp = change_address (DECL_RTL (exp), mode, addr);
4862 }
4863
4864 /* This is the case of an array whose size is to be determined
4865 from its initializer, while the initializer is still being parsed.
4866 See expand_decl. */
4867
4868 else if (GET_CODE (DECL_RTL (exp)) == MEM
4869 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4870 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4871 XEXP (DECL_RTL (exp), 0));
4872
4873 /* If DECL_RTL is memory, we are in the normal case and either
4874 the address is not valid or it is not a register and -fforce-addr
4875 is specified, get the address into a register. */
4876
4877 else if (GET_CODE (DECL_RTL (exp)) == MEM
4878 && modifier != EXPAND_CONST_ADDRESS
4879 && modifier != EXPAND_SUM
4880 && modifier != EXPAND_INITIALIZER
4881 && (! memory_address_p (DECL_MODE (exp),
4882 XEXP (DECL_RTL (exp), 0))
4883 || (flag_force_addr
4884 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4885 temp = change_address (DECL_RTL (exp), VOIDmode,
4886 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4887
4888 /* If we got something, return it. But first, set the alignment
4889 if the address is a register. */
4890 if (temp != 0)
4891 {
4892 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4893 mark_reg_pointer (XEXP (temp, 0),
4894 DECL_ALIGN (exp) / BITS_PER_UNIT);
4895
4896 return temp;
4897 }
4898
4899 /* If the mode of DECL_RTL does not match that of the decl, it
4900 must be a promoted value. We return a SUBREG of the wanted mode,
4901 but mark it so that we know that it was already extended. */
4902
4903 if (GET_CODE (DECL_RTL (exp)) == REG
4904 && GET_MODE (DECL_RTL (exp)) != mode)
4905 {
4906 /* Get the signedness used for this variable. Ensure we get the
4907 same mode we got when the variable was declared. */
4908 if (GET_MODE (DECL_RTL (exp))
4909 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4910 abort ();
4911
4912 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4913 SUBREG_PROMOTED_VAR_P (temp) = 1;
4914 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4915 return temp;
4916 }
4917
4918 return DECL_RTL (exp);
4919
4920 case INTEGER_CST:
4921 return immed_double_const (TREE_INT_CST_LOW (exp),
4922 TREE_INT_CST_HIGH (exp),
4923 mode);
4924
4925 case CONST_DECL:
4926 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4927
4928 case REAL_CST:
4929 /* If optimized, generate immediate CONST_DOUBLE
4930 which will be turned into memory by reload if necessary.
4931
4932 We used to force a register so that loop.c could see it. But
4933 this does not allow gen_* patterns to perform optimizations with
4934 the constants. It also produces two insns in cases like "x = 1.0;".
4935 On most machines, floating-point constants are not permitted in
4936 many insns, so we'd end up copying it to a register in any case.
4937
4938 Now, we do the copying in expand_binop, if appropriate. */
4939 return immed_real_const (exp);
4940
4941 case COMPLEX_CST:
4942 case STRING_CST:
4943 if (! TREE_CST_RTL (exp))
4944 output_constant_def (exp);
4945
4946 /* TREE_CST_RTL probably contains a constant address.
4947 On RISC machines where a constant address isn't valid,
4948 make some insns to get that address into a register. */
4949 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4950 && modifier != EXPAND_CONST_ADDRESS
4951 && modifier != EXPAND_INITIALIZER
4952 && modifier != EXPAND_SUM
4953 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4954 || (flag_force_addr
4955 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4956 return change_address (TREE_CST_RTL (exp), VOIDmode,
4957 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4958 return TREE_CST_RTL (exp);
4959
4960 case SAVE_EXPR:
4961 context = decl_function_context (exp);
4962
4963 /* We treat inline_function_decl as an alias for the current function
4964 because that is the inline function whose vars, types, etc.
4965 are being merged into the current function.
4966 See expand_inline_function. */
4967 if (context == current_function_decl || context == inline_function_decl)
4968 context = 0;
4969
4970 /* If this is non-local, handle it. */
4971 if (context)
4972 {
4973 temp = SAVE_EXPR_RTL (exp);
4974 if (temp && GET_CODE (temp) == REG)
4975 {
4976 put_var_into_stack (exp);
4977 temp = SAVE_EXPR_RTL (exp);
4978 }
4979 if (temp == 0 || GET_CODE (temp) != MEM)
4980 abort ();
4981 return change_address (temp, mode,
4982 fix_lexical_addr (XEXP (temp, 0), exp));
4983 }
4984 if (SAVE_EXPR_RTL (exp) == 0)
4985 {
4986 if (mode == VOIDmode)
4987 temp = const0_rtx;
4988 else
4989 temp = assign_temp (type, 0, 0, 0);
4990
4991 SAVE_EXPR_RTL (exp) = temp;
4992 if (!optimize && GET_CODE (temp) == REG)
4993 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4994 save_expr_regs);
4995
4996 /* If the mode of TEMP does not match that of the expression, it
4997 must be a promoted value. We pass store_expr a SUBREG of the
4998 wanted mode but mark it so that we know that it was already
4999 extended. Note that `unsignedp' was modified above in
5000 this case. */
5001
5002 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5003 {
5004 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5005 SUBREG_PROMOTED_VAR_P (temp) = 1;
5006 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5007 }
5008
5009 if (temp == const0_rtx)
5010 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5011 else
5012 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5013 }
5014
5015 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5016 must be a promoted value. We return a SUBREG of the wanted mode,
5017 but mark it so that we know that it was already extended. */
5018
5019 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5020 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5021 {
5022 /* Compute the signedness and make the proper SUBREG. */
5023 promote_mode (type, mode, &unsignedp, 0);
5024 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5025 SUBREG_PROMOTED_VAR_P (temp) = 1;
5026 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5027 return temp;
5028 }
5029
5030 return SAVE_EXPR_RTL (exp);
5031
5032 case UNSAVE_EXPR:
5033 {
5034 rtx temp;
5035 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5036 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5037 return temp;
5038 }
5039
5040 case PLACEHOLDER_EXPR:
5041 /* If there is an object at the head of the placeholder list,
5042 see if some object in its references is of type TYPE. For
5043 further information, see tree.def. */
5044 if (placeholder_list)
5045 {
5046 tree need_type = TYPE_MAIN_VARIANT (type);
5047 tree object = 0;
5048 tree old_list = placeholder_list;
5049 tree elt;
5050
5051 /* See if the object is the type that we want and does not contain
5052 this PLACEHOLDER_EXPR itself. Then see if the operand of any
5053 reference is the type we want. */
5054 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5055 == need_type)
5056 && (! contains_this_placeholder_p
5057 (TREE_PURPOSE (placeholder_list), exp)))
5058 object = TREE_PURPOSE (placeholder_list);
5059
5060 else
5061 /* Find the outermost reference that is of the type we want and
5062 that does not itself contain this PLACEHOLDER_EXPR. */
5063 for (elt = TREE_PURPOSE (placeholder_list);
5064 elt != 0
5065 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5066 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5067 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5068 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5069 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5070 || TREE_CODE (elt) == COND_EXPR)
5071 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5072 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5073 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5074 == need_type)
5075 && ! contains_this_placeholder_p (TREE_OPERAND (elt, 0),
5076 exp))
5077 {
5078 object = TREE_OPERAND (elt, 0);
5079 break;
5080 }
5081
5082 if (object != 0)
5083 {
5084 /* Expand this object skipping the list entries before
5085 it was found in case it is also a PLACEHOLDER_EXPR.
5086 In that case, we want to translate it using subsequent
5087 entries. */
5088 placeholder_list = TREE_CHAIN (placeholder_list);
5089 temp = expand_expr (object, original_target, tmode, modifier);
5090 placeholder_list = old_list;
5091 return temp;
5092 }
5093 }
5094
5095 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5096 abort ();
5097
5098 case WITH_RECORD_EXPR:
5099 /* Put the object on the placeholder list, expand our first operand,
5100 and pop the list. */
5101 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5102 placeholder_list);
5103 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5104 tmode, modifier);
5105 placeholder_list = TREE_CHAIN (placeholder_list);
5106 return target;
5107
5108 case EXIT_EXPR:
5109 expand_exit_loop_if_false (NULL_PTR,
5110 invert_truthvalue (TREE_OPERAND (exp, 0)));
5111 return const0_rtx;
5112
5113 case LOOP_EXPR:
5114 push_temp_slots ();
5115 expand_start_loop (1);
5116 expand_expr_stmt (TREE_OPERAND (exp, 0));
5117 expand_end_loop ();
5118 pop_temp_slots ();
5119
5120 return const0_rtx;
5121
5122 case BIND_EXPR:
5123 {
5124 tree vars = TREE_OPERAND (exp, 0);
5125 int vars_need_expansion = 0;
5126
5127 /* Need to open a binding contour here because
5128 if there are any cleanups they must be contained here. */
5129 expand_start_bindings (0);
5130
5131 /* Mark the corresponding BLOCK for output in its proper place. */
5132 if (TREE_OPERAND (exp, 2) != 0
5133 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5134 insert_block (TREE_OPERAND (exp, 2));
5135
5136 /* If VARS have not yet been expanded, expand them now. */
5137 while (vars)
5138 {
5139 if (DECL_RTL (vars) == 0)
5140 {
5141 vars_need_expansion = 1;
5142 expand_decl (vars);
5143 }
5144 expand_decl_init (vars);
5145 vars = TREE_CHAIN (vars);
5146 }
5147
5148 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5149
5150 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5151
5152 return temp;
5153 }
5154
5155 case RTL_EXPR:
5156 if (RTL_EXPR_SEQUENCE (exp))
5157 {
5158 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5159 abort ();
5160 emit_insns (RTL_EXPR_SEQUENCE (exp));
5161 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5162 }
5163 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5164 free_temps_for_rtl_expr (exp);
5165 return RTL_EXPR_RTL (exp);
5166
5167 case CONSTRUCTOR:
5168 /* If we don't need the result, just ensure we evaluate any
5169 subexpressions. */
5170 if (ignore)
5171 {
5172 tree elt;
5173 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5174 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5175 return const0_rtx;
5176 }
5177
5178 /* All elts simple constants => refer to a constant in memory. But
5179 if this is a non-BLKmode mode, let it store a field at a time
5180 since that should make a CONST_INT or CONST_DOUBLE when we
5181 fold. Likewise, if we have a target we can use, it is best to
5182 store directly into the target unless the type is large enough
5183 that memcpy will be used. If we are making an initializer and
5184 all operands are constant, put it in memory as well. */
5185 else if ((TREE_STATIC (exp)
5186 && ((mode == BLKmode
5187 && ! (target != 0 && safe_from_p (target, exp)))
5188 || TREE_ADDRESSABLE (exp)
5189 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5190 && (move_by_pieces_ninsns
5191 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5192 TYPE_ALIGN (type) / BITS_PER_UNIT)
5193 > MOVE_RATIO)
5194 && ! mostly_zeros_p (exp))))
5195 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5196 {
5197 rtx constructor = output_constant_def (exp);
5198 if (modifier != EXPAND_CONST_ADDRESS
5199 && modifier != EXPAND_INITIALIZER
5200 && modifier != EXPAND_SUM
5201 && (! memory_address_p (GET_MODE (constructor),
5202 XEXP (constructor, 0))
5203 || (flag_force_addr
5204 && GET_CODE (XEXP (constructor, 0)) != REG)))
5205 constructor = change_address (constructor, VOIDmode,
5206 XEXP (constructor, 0));
5207 return constructor;
5208 }
5209
5210 else
5211 {
5212 /* Handle calls that pass values in multiple non-contiguous
5213 locations. The Irix 6 ABI has examples of this. */
5214 if (target == 0 || ! safe_from_p (target, exp)
5215 || GET_CODE (target) == PARALLEL)
5216 {
5217 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5218 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5219 else
5220 target = assign_temp (type, 0, 1, 1);
5221 }
5222
5223 if (TREE_READONLY (exp))
5224 {
5225 if (GET_CODE (target) == MEM)
5226 target = copy_rtx (target);
5227
5228 RTX_UNCHANGING_P (target) = 1;
5229 }
5230
5231 store_constructor (exp, target, 0);
5232 return target;
5233 }
5234
5235 case INDIRECT_REF:
5236 {
5237 tree exp1 = TREE_OPERAND (exp, 0);
5238 tree exp2;
5239
5240 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5241 op0 = memory_address (mode, op0);
5242
5243 temp = gen_rtx (MEM, mode, op0);
5244 /* If address was computed by addition,
5245 mark this as an element of an aggregate. */
5246 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5247 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5248 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5249 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5250 || (TREE_CODE (exp1) == ADDR_EXPR
5251 && (exp2 = TREE_OPERAND (exp1, 0))
5252 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5253 MEM_IN_STRUCT_P (temp) = 1;
5254 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5255
5256 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5257 here, because, in C and C++, the fact that a location is accessed
5258 through a pointer to const does not mean that the value there can
5259 never change. Languages where it can never change should
5260 also set TREE_STATIC. */
5261 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5262 return temp;
5263 }
5264
5265 case ARRAY_REF:
5266 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5267 abort ();
5268
5269 {
5270 tree array = TREE_OPERAND (exp, 0);
5271 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5272 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5273 tree index = TREE_OPERAND (exp, 1);
5274 tree index_type = TREE_TYPE (index);
5275 int i;
5276
5277 if (TREE_CODE (low_bound) != INTEGER_CST
5278 && contains_placeholder_p (low_bound))
5279 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5280
5281 /* Optimize the special-case of a zero lower bound.
5282
5283 We convert the low_bound to sizetype to avoid some problems
5284 with constant folding. (E.g. suppose the lower bound is 1,
5285 and its mode is QI. Without the conversion, (ARRAY
5286 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5287 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5288
5289 But sizetype isn't quite right either (especially if
5290 the lowbound is negative). FIXME */
5291
5292 if (! integer_zerop (low_bound))
5293 index = fold (build (MINUS_EXPR, index_type, index,
5294 convert (sizetype, low_bound)));
5295
5296 if ((TREE_CODE (index) != INTEGER_CST
5297 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5298 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5299 {
5300 /* Nonconstant array index or nonconstant element size, and
5301 not an array in an unaligned (packed) structure field.
5302 Generate the tree for *(&array+index) and expand that,
5303 except do it in a language-independent way
5304 and don't complain about non-lvalue arrays.
5305 `mark_addressable' should already have been called
5306 for any array for which this case will be reached. */
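 /* E.g. (illustrative only): for A[I], where each element occupies
    SIZE bytes, the tree built below is equivalent to
    *(&A + (elt_type *) (I * SIZE)).  */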
5307
5308 /* Don't forget the const or volatile flag from the array
5309 element. */
5310 tree variant_type = build_type_variant (type,
5311 TREE_READONLY (exp),
5312 TREE_THIS_VOLATILE (exp));
5313 tree array_adr = build1 (ADDR_EXPR,
5314 build_pointer_type (variant_type), array);
5315 tree elt;
5316 tree size = size_in_bytes (type);
5317
5318 /* Convert the integer argument to a type the same size as sizetype
5319 so the multiply won't overflow spuriously. */
5320 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5321 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5322 index);
5323
5324 if (TREE_CODE (size) != INTEGER_CST
5325 && contains_placeholder_p (size))
5326 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5327
5328 /* Don't think the address has side effects
5329 just because the array does.
5330 (In some cases the address might have side effects,
5331 and we fail to record that fact here. However, it should not
5332 matter, since expand_expr should not care.) */
5333 TREE_SIDE_EFFECTS (array_adr) = 0;
5334
5335 elt
5336 = build1
5337 (INDIRECT_REF, type,
5338 fold (build (PLUS_EXPR,
5339 TYPE_POINTER_TO (variant_type),
5340 array_adr,
5341 fold
5342 (build1
5343 (NOP_EXPR,
5344 TYPE_POINTER_TO (variant_type),
5345 fold (build (MULT_EXPR, TREE_TYPE (index),
5346 index,
5347 convert (TREE_TYPE (index),
5348 size))))))));
5349
5350 /* Volatility, etc., of new expression is same as old
5351 expression. */
5352 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5353 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5354 TREE_READONLY (elt) = TREE_READONLY (exp);
5355
5356 return expand_expr (elt, target, tmode, modifier);
5357 }
5358
5359 /* Fold an expression like: "foo"[2].
5360 This is not done in fold so it won't happen inside &.
5361 Don't fold if this is for wide characters since it's too
5362 difficult to do correctly and this is a very rare case. */
5363
5364 if (TREE_CODE (array) == STRING_CST
5365 && TREE_CODE (index) == INTEGER_CST
5366 && !TREE_INT_CST_HIGH (index)
5367 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5368 && GET_MODE_CLASS (mode) == MODE_INT
5369 && GET_MODE_SIZE (mode) == 1)
5370 return GEN_INT (TREE_STRING_POINTER (array)[i]);
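/* E.g., "foo"[2] passes all the checks above and is folded directly
to the character constant 'o'; a wide-character string fails the
GET_MODE_SIZE (mode) == 1 test and is left alone. */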
5371
5372 /* If this is a constant index into a constant array,
5373 just get the value from the array. Handle both the cases when
5374 we have an explicit constructor and when our operand is a variable
5375 that was declared const. */
5376
5377 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5378 {
5379 if (TREE_CODE (index) == INTEGER_CST
5380 && TREE_INT_CST_HIGH (index) == 0)
5381 {
5382 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5383
5384 i = TREE_INT_CST_LOW (index);
5385 while (elem && i--)
5386 elem = TREE_CHAIN (elem);
5387 if (elem)
5388 return expand_expr (fold (TREE_VALUE (elem)), target,
5389 tmode, modifier);
5390 }
5391 }
5392
5393 else if (optimize >= 1
5394 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5395 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5396 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5397 {
5398 if (TREE_CODE (index) == INTEGER_CST
5399 && TREE_INT_CST_HIGH (index) == 0)
5400 {
5401 tree init = DECL_INITIAL (array);
5402
5403 i = TREE_INT_CST_LOW (index);
5404 if (TREE_CODE (init) == CONSTRUCTOR)
5405 {
5406 tree elem = CONSTRUCTOR_ELTS (init);
5407
5408 while (elem
5409 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5410 elem = TREE_CHAIN (elem);
5411 if (elem)
5412 return expand_expr (fold (TREE_VALUE (elem)), target,
5413 tmode, modifier);
5414 }
5415 else if (TREE_CODE (init) == STRING_CST
5416 && i < TREE_STRING_LENGTH (init))
5417 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5418 }
5419 }
5420 }
5421
5422 /* Treat array-ref with constant index as a component-ref. */
5423
5424 case COMPONENT_REF:
5425 case BIT_FIELD_REF:
5426 /* If the operand is a CONSTRUCTOR, we can just extract the
5427 appropriate field if it is present. Don't do this if we have
5428 already written the data since we want to refer to that copy
5429 and varasm.c assumes that's what we'll do. */
5430 if (code != ARRAY_REF
5431 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5432 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5433 {
5434 tree elt;
5435
5436 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5437 elt = TREE_CHAIN (elt))
5438 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5439 /* We can normally use the value of the field in the
5440 CONSTRUCTOR. However, if this is a bitfield in
5441 an integral mode that we can fit in a HOST_WIDE_INT,
5442 we must mask only the number of bits in the bitfield,
5443 since this is done implicitly by the constructor. If
5444 the bitfield does not meet either of those conditions,
5445 we can't do this optimization. */
5446 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5447 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5448 == MODE_INT)
5449 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5450 <= HOST_BITS_PER_WIDE_INT))))
5451 {
5452 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5453 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5454 {
5455 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5456 enum machine_mode imode
5457 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5458
5459 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5460 {
5461 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5462 op0 = expand_and (op0, op1, target);
5463 }
5464 else
5465 {
5466 tree count
5467 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5468
5469 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5470 target, 0);
5471 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5472 target, 0);
5473 }
5474 }
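/* E.g., extracting a signed 3-bit field whose value in the
CONSTRUCTOR is 5 (binary 101): the pair of shifts above moves the
field to the top of IMODE and arithmetic-shifts it back down,
yielding the properly sign-extended value -3. */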
5475
5476 return op0;
5477 }
5478 }
5479
5480 {
5481 enum machine_mode mode1;
5482 int bitsize;
5483 int bitpos;
5484 tree offset;
5485 int volatilep = 0;
5486 int alignment;
5487 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5488 &mode1, &unsignedp, &volatilep,
5489 &alignment);
5490
5491 /* If we got back the original object, something is wrong. Perhaps
5492 we are evaluating an expression too early. In any event, don't
5493 infinitely recurse. */
5494 if (tem == exp)
5495 abort ();
5496
5497 /* If TEM's type is a union of variable size, pass TARGET to the inner
5498 computation, since it will need a temporary and TARGET is known
5499 to be safe to reuse for one. This occurs in unchecked conversion in Ada. */
5500
5501 op0 = expand_expr (tem,
5502 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5503 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5504 != INTEGER_CST)
5505 ? target : NULL_RTX),
5506 VOIDmode,
5507 modifier == EXPAND_INITIALIZER ? modifier : 0);
5508
5509 /* If this is a constant, put it into a register if it is a
5510 legitimate constant and memory if it isn't. */
5511 if (CONSTANT_P (op0))
5512 {
5513 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5514 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5515 op0 = force_reg (mode, op0);
5516 else
5517 op0 = validize_mem (force_const_mem (mode, op0));
5518 }
5519
5520 if (offset != 0)
5521 {
5522 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5523
5524 if (GET_CODE (op0) != MEM)
5525 abort ();
5526 op0 = change_address (op0, VOIDmode,
5527 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5528 force_reg (ptr_mode, offset_rtx)));
5529 }
5530
5531 /* Don't forget about volatility even if this is a bitfield. */
5532 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5533 {
5534 op0 = copy_rtx (op0);
5535 MEM_VOLATILE_P (op0) = 1;
5536 }
5537
5538 /* In cases where an aligned union has an unaligned object
5539 as a field, we might be extracting a BLKmode value from
5540 an integer-mode (e.g., SImode) object. Handle this case
5541 by doing the extract into an object as wide as the field
5542 (which we know to be the width of a basic mode), then
5543 storing into memory, and changing the mode to BLKmode.
5544 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5545 EXPAND_INITIALIZER), then we must not copy to a temporary. */
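/* E.g., a union whose mode is SImode may contain a 3-byte BLKmode
field; the code below fetches the field in an integer mode wide
enough for BITSIZE, then spills it to a stack temporary and
relabels that temporary as BLKmode. */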
5546 if (mode1 == VOIDmode
5547 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5548 || (modifier != EXPAND_CONST_ADDRESS
5549 && modifier != EXPAND_INITIALIZER
5550 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5551 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5552 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5553 /* If the field isn't aligned enough to fetch as a memref,
5554 fetch it as a bit field. */
5555 || (SLOW_UNALIGNED_ACCESS
5556 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5557 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5558 {
5559 enum machine_mode ext_mode = mode;
5560
5561 if (ext_mode == BLKmode)
5562 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5563
5564 if (ext_mode == BLKmode)
5565 {
5566 /* In this case, BITPOS must start at a byte boundary and
5567 TARGET, if specified, must be a MEM. */
5568 if (GET_CODE (op0) != MEM
5569 || (target != 0 && GET_CODE (target) != MEM)
5570 || bitpos % BITS_PER_UNIT != 0)
5571 abort ();
5572
5573 op0 = change_address (op0, VOIDmode,
5574 plus_constant (XEXP (op0, 0),
5575 bitpos / BITS_PER_UNIT));
5576 if (target == 0)
5577 target = assign_temp (type, 0, 1, 1);
5578
5579 emit_block_move (target, op0,
5580 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5581 / BITS_PER_UNIT),
5582 1);
5583
5584 return target;
5585 }
5586
5587 op0 = validize_mem (op0);
5588
5589 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5590 mark_reg_pointer (XEXP (op0, 0), alignment);
5591
5592 op0 = extract_bit_field (op0, bitsize, bitpos,
5593 unsignedp, target, ext_mode, ext_mode,
5594 alignment,
5595 int_size_in_bytes (TREE_TYPE (tem)));
5596
5597 /* If the result is a record type and BITSIZE is narrower than
5598 the mode of OP0, an integral mode, and this is a big endian
5599 machine, we must put the field into the high-order bits. */
5600 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5601 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5602 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5603 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5604 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5605 - bitsize),
5606 op0, 1);
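/* E.g., on a big-endian machine an 8-bit field extracted into an
SImode OP0 is shifted left by 24 above, so that it occupies the
high-order byte just as a BLKmode record value would. */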
5607
5608 if (mode == BLKmode)
5609 {
5610 rtx new = assign_stack_temp (ext_mode,
5611 bitsize / BITS_PER_UNIT, 0);
5612
5613 emit_move_insn (new, op0);
5614 op0 = copy_rtx (new);
5615 PUT_MODE (op0, BLKmode);
5616 MEM_IN_STRUCT_P (op0) = 1;
5617 }
5618
5619 return op0;
5620 }
5621
5622 /* If the result is BLKmode, use that to access the object
5623 now as well. */
5624 if (mode == BLKmode)
5625 mode1 = BLKmode;
5626
5627 /* Get a reference to just this component. */
5628 if (modifier == EXPAND_CONST_ADDRESS
5629 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5630 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5631 (bitpos / BITS_PER_UNIT)));
5632 else
5633 op0 = change_address (op0, mode1,
5634 plus_constant (XEXP (op0, 0),
5635 (bitpos / BITS_PER_UNIT)));
5636 if (GET_CODE (XEXP (op0, 0)) == REG)
5637 mark_reg_pointer (XEXP (op0, 0), alignment);
5638
5639 MEM_IN_STRUCT_P (op0) = 1;
5640 MEM_VOLATILE_P (op0) |= volatilep;
5641 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5642 || modifier == EXPAND_CONST_ADDRESS
5643 || modifier == EXPAND_INITIALIZER)
5644 return op0;
5645 else if (target == 0)
5646 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5647
5648 convert_move (target, op0, unsignedp);
5649 return target;
5650 }
5651
5652 /* Intended for a reference to a buffer of a file-object in Pascal.
5653 But it's not certain that a special tree code will really be
5654 necessary for these. INDIRECT_REF might work for them. */
5655 case BUFFER_REF:
5656 abort ();
5657
5658 case IN_EXPR:
5659 {
5660 /* Pascal set IN expression.
5661
5662 Algorithm:
5663 rlo = set_low - (set_low%bits_per_word);
5664 the_word = set [ (index - rlo)/bits_per_word ];
5665 bit_index = index % bits_per_word;
5666 bitmask = 1 << bit_index;
5667 return !!(the_word & bitmask); */
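/* E.g., with bits_per_word == 8, set_low == 8 and index == 13:
rlo == 8, the_word == set[(13 - 8) / 8] == set[0],
bit_index == 13 % 8 == 5, so the result tests bit 5 of set[0]. */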
5668
5669 tree set = TREE_OPERAND (exp, 0);
5670 tree index = TREE_OPERAND (exp, 1);
5671 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5672 tree set_type = TREE_TYPE (set);
5673 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5674 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5675 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5676 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5677 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5678 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5679 rtx setaddr = XEXP (setval, 0);
5680 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5681 rtx rlow;
5682 rtx diff, quo, rem, addr, bit, result;
5683
5684 preexpand_calls (exp);
5685
5686 /* If domain is empty, answer is no. Likewise if index is constant
5687 and out of bounds. */
5688 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5689 && TREE_CODE (set_low_bound) == INTEGER_CST
5690 && tree_int_cst_lt (set_high_bound, set_low_bound))
5691 || (TREE_CODE (index) == INTEGER_CST
5692 && TREE_CODE (set_low_bound) == INTEGER_CST
5693 && tree_int_cst_lt (index, set_low_bound))
5694 || (TREE_CODE (set_high_bound) == INTEGER_CST
5695 && TREE_CODE (index) == INTEGER_CST
5696 && tree_int_cst_lt (set_high_bound, index)))
5697 return const0_rtx;
5698
5699 if (target == 0)
5700 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5701
5702 /* If we get here, we have to generate the code for both cases
5703 (in range and out of range). */
5704
5705 op0 = gen_label_rtx ();
5706 op1 = gen_label_rtx ();
5707
5708 if (! (GET_CODE (index_val) == CONST_INT
5709 && GET_CODE (lo_r) == CONST_INT))
5710 {
5711 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5712 GET_MODE (index_val), iunsignedp, 0);
5713 emit_jump_insn (gen_blt (op1));
5714 }
5715
5716 if (! (GET_CODE (index_val) == CONST_INT
5717 && GET_CODE (hi_r) == CONST_INT))
5718 {
5719 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5720 GET_MODE (index_val), iunsignedp, 0);
5721 emit_jump_insn (gen_bgt (op1));
5722 }
5723
5724 /* Calculate the element number of bit zero in the first word
5725 of the set. */
5726 if (GET_CODE (lo_r) == CONST_INT)
5727 rlow = GEN_INT (INTVAL (lo_r)
5728 & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
5729 else
5730 rlow = expand_binop (index_mode, and_optab, lo_r,
5731 GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
5732 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5733
5734 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5735 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5736
5737 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5738 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5739 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5740 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5741
5742 addr = memory_address (byte_mode,
5743 expand_binop (index_mode, add_optab, quo,
5744 setaddr, NULL_RTX, iunsignedp,
5745 OPTAB_LIB_WIDEN));
5746
5747 /* Extract the bit we want to examine. */
5748 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5749 gen_rtx (MEM, byte_mode, addr),
5750 make_tree (TREE_TYPE (index), rem),
5751 NULL_RTX, 1);
5752 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5753 GET_MODE (target) == byte_mode ? target : 0,
5754 1, OPTAB_LIB_WIDEN);
5755
5756 if (result != target)
5757 convert_move (target, result, 1);
5758
5759 /* Output the code to handle the out-of-range case. */
5760 emit_jump (op0);
5761 emit_label (op1);
5762 emit_move_insn (target, const0_rtx);
5763 emit_label (op0);
5764 return target;
5765 }
5766
5767 case WITH_CLEANUP_EXPR:
5768 if (RTL_EXPR_RTL (exp) == 0)
5769 {
5770 RTL_EXPR_RTL (exp)
5771 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5772 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
5773
5774 /* That's it for this cleanup. */
5775 TREE_OPERAND (exp, 2) = 0;
5776 }
5777 return RTL_EXPR_RTL (exp);
5778
5779 case CLEANUP_POINT_EXPR:
5780 {
5781 extern int temp_slot_level;
5782 /* Start a new binding layer that will keep track of all cleanup
5783 actions to be performed. */
5784 expand_start_bindings (0);
5785
5786 target_temp_slot_level = temp_slot_level;
5787
5788 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5789 /* If we're going to use this value, load it up now. */
5790 if (! ignore)
5791 op0 = force_not_mem (op0);
5792 preserve_temp_slots (op0);
5793 expand_end_bindings (NULL_TREE, 0, 0);
5794 }
5795 return op0;
5796
5797 case CALL_EXPR:
5798 /* Check for a built-in function. */
5799 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5800 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5801 == FUNCTION_DECL)
5802 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5803 return expand_builtin (exp, target, subtarget, tmode, ignore);
5804
5805 /* If this call was expanded already by preexpand_calls,
5806 just return the result we got. */
5807 if (CALL_EXPR_RTL (exp) != 0)
5808 return CALL_EXPR_RTL (exp);
5809
5810 return expand_call (exp, target, ignore);
5811
5812 case NON_LVALUE_EXPR:
5813 case NOP_EXPR:
5814 case CONVERT_EXPR:
5815 case REFERENCE_EXPR:
5816 if (TREE_CODE (type) == UNION_TYPE)
5817 {
5818 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5819 if (target == 0)
5820 {
5821 if (mode != BLKmode)
5822 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5823 else
5824 target = assign_temp (type, 0, 1, 1);
5825 }
5826
5827 if (GET_CODE (target) == MEM)
5828 /* Store data into beginning of memory target. */
5829 store_expr (TREE_OPERAND (exp, 0),
5830 change_address (target, TYPE_MODE (valtype), 0), 0);
5831
5832 else if (GET_CODE (target) == REG)
5833 /* Store this field into a union of the proper type. */
5834 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5835 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5836 VOIDmode, 0, 1,
5837 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5838 else
5839 abort ();
5840
5841 /* Return the entire union. */
5842 return target;
5843 }
5844
5845 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5846 {
5847 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5848 modifier);
5849
5850 /* If the signedness of the conversion differs and OP0 is
5851 a promoted SUBREG, clear that indication since we now
5852 have to do the proper extension. */
5853 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5854 && GET_CODE (op0) == SUBREG)
5855 SUBREG_PROMOTED_VAR_P (op0) = 0;
5856
5857 return op0;
5858 }
5859
5860 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5861 if (GET_MODE (op0) == mode)
5862 return op0;
5863
5864 /* If OP0 is a constant, just convert it into the proper mode. */
5865 if (CONSTANT_P (op0))
5866 return
5867 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5868 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5869
5870 if (modifier == EXPAND_INITIALIZER)
5871 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5872
5873 if (target == 0)
5874 return
5875 convert_to_mode (mode, op0,
5876 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5877 else
5878 convert_move (target, op0,
5879 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5880 return target;
5881
5882 case PLUS_EXPR:
5883 /* We come here from MINUS_EXPR when the second operand is a
5884 constant. */
5885 plus_expr:
5886 this_optab = add_optab;
5887
5888 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5889 something else, make sure we add the register to the constant and
5890 then to the other thing. This case can occur during strength
5891 reduction and doing it this way will produce better code if the
5892 frame pointer or argument pointer is eliminated.
5893
5894 fold-const.c will ensure that the constant is always in the inner
5895 PLUS_EXPR, so the only case we need to do anything about is if
5896 sp, ap, or fp is our second argument, in which case we must swap
5897 the innermost first argument and our second argument. */
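/* E.g., if EXP is (PLUS (PLUS X C) FP), the swap below turns it
into (PLUS (PLUS FP C) X), so FP + C can later fold into a single
register-plus-offset address. */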
5898
5899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5900 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5901 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5902 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5903 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5904 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5905 {
5906 tree t = TREE_OPERAND (exp, 1);
5907
5908 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5909 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5910 }
5911
5912 /* If the result is to be ptr_mode and we are adding an integer to
5913 something, we might be forming a constant. So try to use
5914 plus_constant. If it produces a sum and we can't accept it,
5915 use force_operand. This allows P = &ARR[const] to generate
5916 efficient code on machines where a SYMBOL_REF is not a valid
5917 address.
5918
5919 If this is an EXPAND_SUM call, always return the sum. */
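/* E.g., for P = &ARR[3] with 4-byte elements, plus_constant can fold
the address to (const (plus (symbol_ref ARR) 12)) rather than
emitting an explicit addition at run time. */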
5920 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5921 || mode == ptr_mode)
5922 {
5923 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5924 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5925 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5926 {
5927 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5928 EXPAND_SUM);
5929 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5930 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5931 op1 = force_operand (op1, target);
5932 return op1;
5933 }
5934
5935 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5936 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5937 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5938 {
5939 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5940 EXPAND_SUM);
5941 if (! CONSTANT_P (op0))
5942 {
5943 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5944 VOIDmode, modifier);
5945 /* Don't go to both_summands if modifier
5946 says it's not right to return a PLUS. */
5947 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5948 goto binop2;
5949 goto both_summands;
5950 }
5951 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5952 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5953 op0 = force_operand (op0, target);
5954 return op0;
5955 }
5956 }
5957
5958 /* No sense saving up arithmetic to be done
5959 if it's all in the wrong mode to form part of an address.
5960 And force_operand won't know whether to sign-extend or
5961 zero-extend. */
5962 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5963 || mode != ptr_mode)
5964 goto binop;
5965
5966 preexpand_calls (exp);
5967 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5968 subtarget = 0;
5969
5970 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5971 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5972
5973 both_summands:
5974 /* Make sure any term that's a sum with a constant comes last. */
5975 if (GET_CODE (op0) == PLUS
5976 && CONSTANT_P (XEXP (op0, 1)))
5977 {
5978 temp = op0;
5979 op0 = op1;
5980 op1 = temp;
5981 }
5982 /* If adding to a sum including a constant,
5983 associate it to put the constant outside. */
5984 if (GET_CODE (op1) == PLUS
5985 && CONSTANT_P (XEXP (op1, 1)))
5986 {
5987 rtx constant_term = const0_rtx;
5988
5989 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5990 if (temp != 0)
5991 op0 = temp;
5992 /* Ensure that MULT comes first if there is one. */
5993 else if (GET_CODE (op0) == MULT)
5994 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5995 else
5996 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5997
5998 /* Let's also eliminate constants from op0 if possible. */
5999 op0 = eliminate_constant_term (op0, &constant_term);
6000
6001 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6002 their sum should be a constant. Form it into OP1, since the
6003 result we want will then be OP0 + OP1. */
6004
6005 temp = simplify_binary_operation (PLUS, mode, constant_term,
6006 XEXP (op1, 1));
6007 if (temp != 0)
6008 op1 = temp;
6009 else
6010 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6011 }
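/* E.g., adding OP0 == (plus A 4) and OP1 == (plus B 6): the first
test above swaps them, the association step rewrites the pair as
(plus A B) with constant term 10, and the result below is
(plus (plus A B) 10). */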
6012
6013 /* Put a constant term last and put a multiplication first. */
6014 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6015 temp = op1, op1 = op0, op0 = temp;
6016
6017 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6018 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6019
6020 case MINUS_EXPR:
6021 /* For initializers, we are allowed to return a MINUS of two
6022 symbolic constants. Here we handle all cases when both operands
6023 are constant. */
6026 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6027 && really_constant_p (TREE_OPERAND (exp, 0))
6028 && really_constant_p (TREE_OPERAND (exp, 1)))
6029 {
6030 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6031 VOIDmode, modifier);
6032 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6033 VOIDmode, modifier);
6034
6035 /* If the last operand is a CONST_INT, use plus_constant of
6036 the negated constant. Else make the MINUS. */
6037 if (GET_CODE (op1) == CONST_INT)
6038 return plus_constant (op0, - INTVAL (op1));
6039 else
6040 return gen_rtx (MINUS, mode, op0, op1);
6041 }
6042 /* Convert A - const to A + (-const). */
6043 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6044 {
6045 tree negated = fold (build1 (NEGATE_EXPR, type,
6046 TREE_OPERAND (exp, 1)));
6047
6048 /* Deal with the case where we can't negate the constant
6049 in TYPE. */
6050 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6051 {
6052 tree newtype = signed_type (type);
6053 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6054 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6055 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6056
6057 if (! TREE_OVERFLOW (newneg))
6058 return expand_expr (convert (type,
6059 build (PLUS_EXPR, newtype,
6060 newop0, newneg)),
6061 target, tmode, modifier);
6062 }
6063 else
6064 {
6065 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6066 goto plus_expr;
6067 }
6068 }
6069 this_optab = sub_optab;
6070 goto binop;
6071
6072 case MULT_EXPR:
6073 preexpand_calls (exp);
6074 /* If first operand is constant, swap them.
6075 Thus the following special case checks need only
6076 check the second operand. */
6077 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6078 {
6079 register tree t1 = TREE_OPERAND (exp, 0);
6080 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6081 TREE_OPERAND (exp, 1) = t1;
6082 }
6083
6084 /* Attempt to return something suitable for generating an
6085 indexed address, for machines that support that. */
6086
6087 if (modifier == EXPAND_SUM && mode == ptr_mode
6088 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6089 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6090 {
6091 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6092
6093 /* Apply distributive law if OP0 is x+c. */
6094 if (GET_CODE (op0) == PLUS
6095 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6096 return gen_rtx (PLUS, mode,
6097 gen_rtx (MULT, mode, XEXP (op0, 0),
6098 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6099 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6100 * INTVAL (XEXP (op0, 1))));
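/* E.g., (X + 4) * 3 is returned as (plus (mult X 3) 12) here,
a form directly usable in an indexed address. */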
6101
6102 if (GET_CODE (op0) != REG)
6103 op0 = force_operand (op0, NULL_RTX);
6104 if (GET_CODE (op0) != REG)
6105 op0 = copy_to_mode_reg (mode, op0);
6106
6107 return gen_rtx (MULT, mode, op0,
6108 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6109 }
6110
6111 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6112 subtarget = 0;
6113
6114 /* Check for multiplying things that have been extended
6115 from a narrower type. If this machine supports multiplying
6116 in that narrower type with a result in the desired type,
6117 do it that way, and avoid the explicit type-conversion. */
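/* E.g., (int) (short) A * (int) (short) B can use a HImode-to-SImode
widening multiply (mulhisi3, on machines that provide it) instead of
two extensions followed by a full SImode multiply. */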
6118 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6119 && TREE_CODE (type) == INTEGER_TYPE
6120 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6121 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6122 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6123 && int_fits_type_p (TREE_OPERAND (exp, 1),
6124 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6125 /* Don't use a widening multiply if a shift will do. */
6126 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6127 > HOST_BITS_PER_WIDE_INT)
6128 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6129 ||
6130 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6131 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6132 ==
6133 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6134 /* If both operands are extended, they must either both
6135 be zero-extended or both be sign-extended. */
6136 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6137 ==
6138 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6139 {
6140 enum machine_mode innermode
6141 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6142 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6143 ? smul_widen_optab : umul_widen_optab);
6144 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6145 ? umul_widen_optab : smul_widen_optab);
6146 if (mode == GET_MODE_WIDER_MODE (innermode))
6147 {
6148 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6149 {
6150 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6151 NULL_RTX, VOIDmode, 0);
6152 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6153 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6154 VOIDmode, 0);
6155 else
6156 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6157 NULL_RTX, VOIDmode, 0);
6158 goto binop2;
6159 }
6160 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6161 && innermode == word_mode)
6162 {
6163 rtx htem;
6164 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6165 NULL_RTX, VOIDmode, 0);
6166 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6167 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6168 VOIDmode, 0);
6169 else
6170 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6171 NULL_RTX, VOIDmode, 0);
6172 temp = expand_binop (mode, other_optab, op0, op1, target,
6173 unsignedp, OPTAB_LIB_WIDEN);
6174 htem = expand_mult_highpart_adjust (innermode,
6175 gen_highpart (innermode, temp),
6176 op0, op1,
6177 gen_highpart (innermode, temp),
6178 unsignedp);
6179 emit_move_insn (gen_highpart (innermode, temp), htem);
6180 return temp;
6181 }
6182 }
6183 }
6184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6185 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6186 return expand_mult (mode, op0, op1, target, unsignedp);
6187
6188 case TRUNC_DIV_EXPR:
6189 case FLOOR_DIV_EXPR:
6190 case CEIL_DIV_EXPR:
6191 case ROUND_DIV_EXPR:
6192 case EXACT_DIV_EXPR:
6193 preexpand_calls (exp);
6194 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6195 subtarget = 0;
6196 /* Possible optimization: compute the dividend with EXPAND_SUM;
6197 then, if the divisor is constant, we can optimize the case
6198 where some terms of the dividend have coefficients divisible by it. */
6199 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6200 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6201 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6202
6203 case RDIV_EXPR:
6204 this_optab = flodiv_optab;
6205 goto binop;
6206
6207 case TRUNC_MOD_EXPR:
6208 case FLOOR_MOD_EXPR:
6209 case CEIL_MOD_EXPR:
6210 case ROUND_MOD_EXPR:
6211 preexpand_calls (exp);
6212 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6213 subtarget = 0;
6214 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6215 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6216 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6217
6218 case FIX_ROUND_EXPR:
6219 case FIX_FLOOR_EXPR:
6220 case FIX_CEIL_EXPR:
6221 abort (); /* Not used for C. */
6222
6223 case FIX_TRUNC_EXPR:
6224 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6225 if (target == 0)
6226 target = gen_reg_rtx (mode);
6227 expand_fix (target, op0, unsignedp);
6228 return target;
6229
6230 case FLOAT_EXPR:
6231 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6232 if (target == 0)
6233 target = gen_reg_rtx (mode);
6234 /* expand_float can't figure out what to do if FROM has VOIDmode.
6235 So give it the correct mode. With -O, cse will optimize this. */
6236 if (GET_MODE (op0) == VOIDmode)
6237 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6238 op0);
6239 expand_float (target, op0,
6240 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6241 return target;
6242
6243 case NEGATE_EXPR:
6244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6245 temp = expand_unop (mode, neg_optab, op0, target, 0);
6246 if (temp == 0)
6247 abort ();
6248 return temp;
6249
6250 case ABS_EXPR:
6251 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6252
6253 /* Handle complex values specially. */
6254 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6255 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6256 return expand_complex_abs (mode, op0, target, unsignedp);
6257
6258 /* Unsigned abs is simply the operand. Testing here means we don't
6259 risk generating incorrect code below. */
6260 if (TREE_UNSIGNED (type))
6261 return op0;
6262
6263 return expand_abs (mode, op0, target, unsignedp,
6264 safe_from_p (target, TREE_OPERAND (exp, 0)));
6265
6266 case MAX_EXPR:
6267 case MIN_EXPR:
6268 target = original_target;
6269 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6270 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6271 || GET_MODE (target) != mode
6272 || (GET_CODE (target) == REG
6273 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6274 target = gen_reg_rtx (mode);
6275 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6276 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6277
6278 /* First try to do it with a special MIN or MAX instruction.
6279 If that does not win, use a conditional jump to select the proper
6280 value. */
6281 this_optab = (TREE_UNSIGNED (type)
6282 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6283 : (code == MIN_EXPR ? smin_optab : smax_optab));
6284
6285 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6286 OPTAB_WIDEN);
6287 if (temp != 0)
6288 return temp;
6289
6290 /* At this point, a MEM target is no longer useful; we will get better
6291 code without it. */
6292
6293 if (GET_CODE (target) == MEM)
6294 target = gen_reg_rtx (mode);
6295
6296 if (target != op0)
6297 emit_move_insn (target, op0);
6298
6299 op0 = gen_label_rtx ();
6300
6301 /* If this mode is an integer too wide to compare properly,
6302 compare word by word. Rely on cse to optimize constant cases. */
6303 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6304 {
6305 if (code == MAX_EXPR)
6306 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6307 target, op1, NULL_RTX, op0);
6308 else
6309 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6310 op1, target, NULL_RTX, op0);
6311 emit_move_insn (target, op1);
6312 }
6313 else
6314 {
6315 if (code == MAX_EXPR)
6316 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6317 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6318 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6319 else
6320 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6321 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6322 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6323 if (temp == const0_rtx)
6324 emit_move_insn (target, op1);
6325 else if (temp != const_true_rtx)
6326 {
6327 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6328 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6329 else
6330 abort ();
6331 emit_move_insn (target, op1);
6332 }
6333 }
6334 emit_label (op0);
6335 return target;
6336
6337 case BIT_NOT_EXPR:
6338 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6339 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6340 if (temp == 0)
6341 abort ();
6342 return temp;
6343
6344 case FFS_EXPR:
6345 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6346 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6347 if (temp == 0)
6348 abort ();
6349 return temp;
6350
6351 /* ??? Can optimize bitwise operations with one arg constant.
6352 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6353 and (a bitwise1 b) bitwise2 b (etc)
6354 but that is probably not worthwhile. */
6355
6356 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6357 boolean values when we want in all cases to compute both of them. In
6358 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6359 as actual zero-or-1 values and then bitwise anding. In cases where
6360 there cannot be any side effects, better code would be made by
6361 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6362 how to recognize those cases. */
6363
6364 case TRUTH_AND_EXPR:
6365 case BIT_AND_EXPR:
6366 this_optab = and_optab;
6367 goto binop;
6368
6369 case TRUTH_OR_EXPR:
6370 case BIT_IOR_EXPR:
6371 this_optab = ior_optab;
6372 goto binop;
6373
6374 case TRUTH_XOR_EXPR:
6375 case BIT_XOR_EXPR:
6376 this_optab = xor_optab;
6377 goto binop;
6378
6379 case LSHIFT_EXPR:
6380 case RSHIFT_EXPR:
6381 case LROTATE_EXPR:
6382 case RROTATE_EXPR:
6383 preexpand_calls (exp);
6384 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6385 subtarget = 0;
6386 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6387 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6388 unsignedp);
6389
6390 /* Could determine the answer when only additive constants differ. Also,
6391 the addition of one can be handled by changing the condition. */
6392 case LT_EXPR:
6393 case LE_EXPR:
6394 case GT_EXPR:
6395 case GE_EXPR:
6396 case EQ_EXPR:
6397 case NE_EXPR:
6398 preexpand_calls (exp);
6399 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6400 if (temp != 0)
6401 return temp;
6402
6403 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6404 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6405 && original_target
6406 && GET_CODE (original_target) == REG
6407 && (GET_MODE (original_target)
6408 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6409 {
6410 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6411 VOIDmode, 0);
6412
6413 if (temp != original_target)
6414 temp = copy_to_reg (temp);
6415
6416 op1 = gen_label_rtx ();
6417 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6418 GET_MODE (temp), unsignedp, 0);
6419 emit_jump_insn (gen_beq (op1));
6420 emit_move_insn (temp, const1_rtx);
6421 emit_label (op1);
6422 return temp;
6423 }
6424
6425 /* If no set-flag instruction, must generate a conditional
6426 store into a temporary variable. Drop through
6427 and handle this like && and ||. */
6428
6429 case TRUTH_ANDIF_EXPR:
6430 case TRUTH_ORIF_EXPR:
6431 if (! ignore
6432 && (target == 0 || ! safe_from_p (target, exp)
6433 /* Make sure we don't have a hard reg (such as function's return
6434 value) live across basic blocks, if not optimizing. */
6435 || (!optimize && GET_CODE (target) == REG
6436 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6437 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6438
6439 if (target)
6440 emit_clr_insn (target);
6441
6442 op1 = gen_label_rtx ();
6443 jumpifnot (exp, op1);
6444
6445 if (target)
6446 emit_0_to_1_insn (target);
6447
6448 emit_label (op1);
6449 return ignore ? const0_rtx : target;
6450
6451 case TRUTH_NOT_EXPR:
6452 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6453 /* The parser is careful to generate TRUTH_NOT_EXPR
6454 only with operands that are always zero or one. */
6455 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6456 target, 1, OPTAB_LIB_WIDEN);
6457 if (temp == 0)
6458 abort ();
6459 return temp;
6460
6461 case COMPOUND_EXPR:
6462 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6463 emit_queue ();
6464 return expand_expr (TREE_OPERAND (exp, 1),
6465 (ignore ? const0_rtx : target),
6466 VOIDmode, 0);
6467
6468 case COND_EXPR:
6469 /* If we would have a "singleton" (see below) were it not for a
6470 conversion in each arm, bring that conversion back out. */
6471 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6472 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6473 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6474 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6475 {
6476 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6477 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6478
6479 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6480 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6481 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6482 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6483 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6484 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6485 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6486 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6487 return expand_expr (build1 (NOP_EXPR, type,
6488 build (COND_EXPR, TREE_TYPE (true),
6489 TREE_OPERAND (exp, 0),
6490 true, false)),
6491 target, tmode, modifier);
6492 }
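/* E.g., COND ? (int) (X + Y) : (int) X is rewritten above as
(int) (COND ? X + Y : X), exposing the "singleton" form handled
below: copy X to the output, then conditionally store X + Y. */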
6493
6494 {
6495 /* Note that COND_EXPRs whose type is a structure or union
6496 are required to be constructed to contain assignments of
6497 a temporary variable, so that we can evaluate them here
6498 for side effect only. If type is void, we must do likewise. */
6499
6500 /* If an arm of the branch requires a cleanup,
6501 only that cleanup is performed. */
6502
6503 tree singleton = 0;
6504 tree binary_op = 0, unary_op = 0;
6505
6506 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6507 convert it to our mode, if necessary. */
6508 if (integer_onep (TREE_OPERAND (exp, 1))
6509 && integer_zerop (TREE_OPERAND (exp, 2))
6510 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6511 {
6512 if (ignore)
6513 {
6514 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6515 modifier);
6516 return const0_rtx;
6517 }
6518
6519 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6520 if (GET_MODE (op0) == mode)
6521 return op0;
6522
6523 if (target == 0)
6524 target = gen_reg_rtx (mode);
6525 convert_move (target, op0, unsignedp);
6526 return target;
6527 }
6528
6529 /* Check for X ? A + B : A. If we have this, we can copy A to the
6530 output and conditionally add B. Similarly for unary operations.
6531 Don't do this if X has side-effects because those side effects
6532 might affect A or B and the "?" operation is a sequence point in
6533 ANSI. (operand_equal_p tests for side effects.) */
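/* E.g., in X ? A + B : A, operand 2 (A) equals the first operand of
the binary operand 1, so SINGLETON becomes A and BINARY_OP becomes
A + B in the first test below. */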
6534
6535 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6536 && operand_equal_p (TREE_OPERAND (exp, 2),
6537 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6538 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6539 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6540 && operand_equal_p (TREE_OPERAND (exp, 1),
6541 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6542 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6543 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6544 && operand_equal_p (TREE_OPERAND (exp, 2),
6545 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6546 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6547 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6548 && operand_equal_p (TREE_OPERAND (exp, 1),
6549 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6550 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6551
6552 /* If we are not to produce a result, we have no target. Otherwise,
6553 if a target was specified use it; it will not be used as an
6554 intermediate target unless it is safe. If no target, use a
6555 temporary. */
6556
6557 if (ignore)
6558 temp = 0;
6559 else if (original_target
6560 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6561 || (singleton && GET_CODE (original_target) == REG
6562 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6563 && original_target == var_rtx (singleton)))
6564 && GET_MODE (original_target) == mode
6565 && ! (GET_CODE (original_target) == MEM
6566 && MEM_VOLATILE_P (original_target)))
6567 temp = original_target;
6568 else if (TREE_ADDRESSABLE (type))
6569 abort ();
6570 else
6571 temp = assign_temp (type, 0, 0, 1);
6572
6573 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6574 do the test of X as a store-flag operation, do this as
6575 A + ((X != 0) << log C). Similarly for other simple binary
6576 operators. Only do for C == 1 if BRANCH_COST is low. */
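/* E.g., X ? A + 4 : A, where X can be done as a store-flag, becomes
A + ((X != 0) << 2), with no branch at all. */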
6577 if (temp && singleton && binary_op
6578 && (TREE_CODE (binary_op) == PLUS_EXPR
6579 || TREE_CODE (binary_op) == MINUS_EXPR
6580 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6581 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6582 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6583 : integer_onep (TREE_OPERAND (binary_op, 1)))
6584 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6585 {
6586 rtx result;
6587 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6588 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6589 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6590 : xor_optab);
6591
6592 /* If we had X ? A : A + 1, do this as A + (X == 0).
6593
6594 We have to invert the truth value here and then put it
6595 back later if do_store_flag fails. We cannot simply copy
6596 TREE_OPERAND (exp, 0) to another variable and modify that
6597 because invert_truthvalue can modify the tree pointed to
6598 by its argument. */
6599 if (singleton == TREE_OPERAND (exp, 1))
6600 TREE_OPERAND (exp, 0)
6601 = invert_truthvalue (TREE_OPERAND (exp, 0));
6602
6603 result = do_store_flag (TREE_OPERAND (exp, 0),
6604 (safe_from_p (temp, singleton)
6605 ? temp : NULL_RTX),
6606 mode, BRANCH_COST <= 1);
6607
6608 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6609 result = expand_shift (LSHIFT_EXPR, mode, result,
6610 build_int_2 (tree_log2
6611 (TREE_OPERAND
6612 (binary_op, 1)),
6613 0),
6614 (safe_from_p (temp, singleton)
6615 ? temp : NULL_RTX), 0);
6616
6617 if (result)
6618 {
6619 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6620 return expand_binop (mode, boptab, op1, result, temp,
6621 unsignedp, OPTAB_LIB_WIDEN);
6622 }
6623 else if (singleton == TREE_OPERAND (exp, 1))
6624 TREE_OPERAND (exp, 0)
6625 = invert_truthvalue (TREE_OPERAND (exp, 0));
6626 }
6627
6628 do_pending_stack_adjust ();
6629 NO_DEFER_POP;
6630 op0 = gen_label_rtx ();
6631
6632 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6633 {
6634 if (temp != 0)
6635 {
6636 /* If the target conflicts with the other operand of the
6637 binary op, we can't use it. Also, we can't use the target
6638 if it is a hard register, because evaluating the condition
6639 might clobber it. */
6640 if ((binary_op
6641 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6642 || (GET_CODE (temp) == REG
6643 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6644 temp = gen_reg_rtx (mode);
6645 store_expr (singleton, temp, 0);
6646 }
6647 else
6648 expand_expr (singleton,
6649 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6650 if (singleton == TREE_OPERAND (exp, 1))
6651 jumpif (TREE_OPERAND (exp, 0), op0);
6652 else
6653 jumpifnot (TREE_OPERAND (exp, 0), op0);
6654
6655 start_cleanup_deferal ();
6656 if (binary_op && temp == 0)
6657 /* Just touch the other operand. */
6658 expand_expr (TREE_OPERAND (binary_op, 1),
6659 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6660 else if (binary_op)
6661 store_expr (build (TREE_CODE (binary_op), type,
6662 make_tree (type, temp),
6663 TREE_OPERAND (binary_op, 1)),
6664 temp, 0);
6665 else
6666 store_expr (build1 (TREE_CODE (unary_op), type,
6667 make_tree (type, temp)),
6668 temp, 0);
6669 op1 = op0;
6670 }
6671 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6672 comparison operator. If we have one of these cases, set the
6673 output to A, branch on A (cse will merge these two references),
6674 then set the output to FOO. */
6675 else if (temp
6676 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6677 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6678 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6679 TREE_OPERAND (exp, 1), 0)
6680 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6681 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6682 {
6683 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6684 temp = gen_reg_rtx (mode);
6685 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6686 jumpif (TREE_OPERAND (exp, 0), op0);
6687
6688 start_cleanup_deferal ();
6689 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6690 op1 = op0;
6691 }
6692 else if (temp
6693 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6694 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6695 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6696 TREE_OPERAND (exp, 2), 0)
6697 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6698 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6699 {
6700 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6701 temp = gen_reg_rtx (mode);
6702 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6703 jumpifnot (TREE_OPERAND (exp, 0), op0);
6704
6705 start_cleanup_deferal ();
6706 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6707 op1 = op0;
6708 }
6709 else
6710 {
6711 op1 = gen_label_rtx ();
6712 jumpifnot (TREE_OPERAND (exp, 0), op0);
6713
6714 start_cleanup_deferal ();
6715 if (temp != 0)
6716 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6717 else
6718 expand_expr (TREE_OPERAND (exp, 1),
6719 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6720 end_cleanup_deferal ();
6721 emit_queue ();
6722 emit_jump_insn (gen_jump (op1));
6723 emit_barrier ();
6724 emit_label (op0);
6725 start_cleanup_deferal ();
6726 if (temp != 0)
6727 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6728 else
6729 expand_expr (TREE_OPERAND (exp, 2),
6730 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6731 }
6732
6733 end_cleanup_deferal ();
6734
6735 emit_queue ();
6736 emit_label (op1);
6737 OK_DEFER_POP;
6738
6739 return temp;
6740 }
6741
6742 case TARGET_EXPR:
6743 {
6744 /* Something needs to be initialized, but we didn't know
6745 where that thing was when building the tree. For example,
6746 it could be the return value of a function, or a parameter
6747 to a function which is laid out on the stack, or a temporary
6748 variable which must be passed by reference.
6749
6750 We guarantee that the expression will either be constructed
6751 or copied into our original target. */
6752
6753 tree slot = TREE_OPERAND (exp, 0);
6754 tree cleanups = NULL_TREE;
6755 tree exp1;
6756 rtx temp;
6757
6758 if (TREE_CODE (slot) != VAR_DECL)
6759 abort ();
6760
6761 if (! ignore)
6762 target = original_target;
6763
6764 if (target == 0)
6765 {
6766 if (DECL_RTL (slot) != 0)
6767 {
6768 target = DECL_RTL (slot);
6769 /* If we have already expanded the slot, don't do
6770 it again. (mrs) */
6771 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6772 return target;
6773 }
6774 else
6775 {
6776 target = assign_temp (type, 2, 1, 1);
6777 /* All temp slots at this level must not conflict. */
6778 preserve_temp_slots (target);
6779 DECL_RTL (slot) = target;
6780
6781 /* Since SLOT is not known to the called function
6782 to belong to its stack frame, we must build an explicit
6783 cleanup. This case occurs when we must build up a reference
6784 to pass the reference as an argument. In this case,
6785 it is very likely that such a reference need not be
6786 built here. */
6787
6788 if (TREE_OPERAND (exp, 2) == 0)
6789 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6790 cleanups = TREE_OPERAND (exp, 2);
6791 }
6792 }
6793 else
6794 {
6795 /* This case does occur, when expanding a parameter which
6796 needs to be constructed on the stack. The target
6797 is the actual stack address that we want to initialize.
6798 The function we call will perform the cleanup in this case. */
6799
6800 /* If we have already assigned it space, use that space,
6801 not the target we were passed in, as our target
6802 parameter is only a hint. */
6803 if (DECL_RTL (slot) != 0)
6804 {
6805 target = DECL_RTL (slot);
6806 /* If we have already expanded the slot, don't do
6807 it again. (mrs) */
6808 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6809 return target;
6810 }
6811
6812 DECL_RTL (slot) = target;
6813 }
6814
6815 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6816 /* Mark it as expanded. */
6817 TREE_OPERAND (exp, 1) = NULL_TREE;
6818
6819 store_expr (exp1, target, 0);
6820
6821 expand_decl_cleanup (NULL_TREE, cleanups);
6822
6823 return target;
6824 }
6825
6826 case INIT_EXPR:
6827 {
6828 tree lhs = TREE_OPERAND (exp, 0);
6829 tree rhs = TREE_OPERAND (exp, 1);
6830 tree noncopied_parts = 0;
6831 tree lhs_type = TREE_TYPE (lhs);
6832
6833 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6834 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6835 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6836 TYPE_NONCOPIED_PARTS (lhs_type));
6837 while (noncopied_parts != 0)
6838 {
6839 expand_assignment (TREE_VALUE (noncopied_parts),
6840 TREE_PURPOSE (noncopied_parts), 0, 0);
6841 noncopied_parts = TREE_CHAIN (noncopied_parts);
6842 }
6843 return temp;
6844 }
6845
6846 case MODIFY_EXPR:
6847 {
6848 /* If lhs is complex, expand calls in rhs before computing it.
6849 That's so we don't compute a pointer and save it over a call.
6850 If lhs is simple, compute it first so we can give it as a
6851 target if the rhs is just a call. This avoids an extra temp and copy
6852 and that prevents a partial-subsumption which makes bad code.
6853 Actually we could treat component_ref's of vars like vars. */
6854
6855 tree lhs = TREE_OPERAND (exp, 0);
6856 tree rhs = TREE_OPERAND (exp, 1);
6857 tree noncopied_parts = 0;
6858 tree lhs_type = TREE_TYPE (lhs);
6859
6860 temp = 0;
6861
6862 if (TREE_CODE (lhs) != VAR_DECL
6863 && TREE_CODE (lhs) != RESULT_DECL
6864 && TREE_CODE (lhs) != PARM_DECL
6865 && ! (TREE_CODE (lhs) == INDIRECT_REF
6866 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
6867 preexpand_calls (exp);
6868
6869 /* Check for |= or &= of a bitfield of size one into another bitfield
6870 of size one. In this case, (unless we need the result of the
6871 assignment) we can do this more efficiently with a
6872 test followed by an assignment, if necessary.
6873
6874 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6875 things change so we do, this code should be enhanced to
6876 support it. */
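/* E.g., for one-bit fields, A.X |= A.Y becomes "if (A.Y) A.X = 1;"
and A.X &= A.Y becomes "if (! A.Y) A.X = 0;", replacing a
read-modify-write with a test and a simple store. */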
6877 if (ignore
6878 && TREE_CODE (lhs) == COMPONENT_REF
6879 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6880 || TREE_CODE (rhs) == BIT_AND_EXPR)
6881 && TREE_OPERAND (rhs, 0) == lhs
6882 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6883 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6884 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6885 {
6886 rtx label = gen_label_rtx ();
6887
6888 do_jump (TREE_OPERAND (rhs, 1),
6889 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6890 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6891 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6892 (TREE_CODE (rhs) == BIT_IOR_EXPR
6893 ? integer_one_node
6894 : integer_zero_node)),
6895 0, 0);
6896 do_pending_stack_adjust ();
6897 emit_label (label);
6898 return const0_rtx;
6899 }
6900
6901 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6902 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6903 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6904 TYPE_NONCOPIED_PARTS (lhs_type));
6905
6906 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6907 while (noncopied_parts != 0)
6908 {
6909 expand_assignment (TREE_PURPOSE (noncopied_parts),
6910 TREE_VALUE (noncopied_parts), 0, 0);
6911 noncopied_parts = TREE_CHAIN (noncopied_parts);
6912 }
6913 return temp;
6914 }
6915
6916 case PREINCREMENT_EXPR:
6917 case PREDECREMENT_EXPR:
6918 return expand_increment (exp, 0, ignore);
6919
6920 case POSTINCREMENT_EXPR:
6921 case POSTDECREMENT_EXPR:
6922 /* Faster to treat as pre-increment if result is not used. */
6923 return expand_increment (exp, ! ignore, ignore);
6924
6925 case ADDR_EXPR:
6926 /* If nonzero, TEMP will be set to the address of something that might
6927 be a MEM corresponding to a stack slot. */
6928 temp = 0;
6929
6930 /* Are we taking the address of a nested function? */
6931 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6932 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6933 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
6934 {
6935 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6936 op0 = force_operand (op0, target);
6937 }
6938 /* If we are taking the address of something erroneous, just
6939 return a zero. */
6940 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6941 return const0_rtx;
6942 else
6943 {
6944 /* We make sure to pass const0_rtx down if we came in with
6945 ignore set, to avoid doing the cleanups twice for something. */
6946 op0 = expand_expr (TREE_OPERAND (exp, 0),
6947 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6948 (modifier == EXPAND_INITIALIZER
6949 ? modifier : EXPAND_CONST_ADDRESS));
6950
6951 /* If we are going to ignore the result, OP0 will have been set
6952 to const0_rtx, so just return it. Don't get confused and
6953 think we are taking the address of the constant. */
6954 if (ignore)
6955 return op0;
6956
6957 op0 = protect_from_queue (op0, 0);
6958
6959 /* We would like the object in memory. If it is a constant,
6960 we can have it statically allocated in memory. For
6961 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6962 memory and store the value into it. */
6963
6964 if (CONSTANT_P (op0))
6965 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6966 op0);
6967 else if (GET_CODE (op0) == MEM)
6968 {
6969 mark_temp_addr_taken (op0);
6970 temp = XEXP (op0, 0);
6971 }
6972
6973 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6974 || GET_CODE (op0) == CONCAT)
6975 {
6976 /* If this object is in a register, it must not
6977 be BLKmode. */
6978 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6979 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6980
6981 mark_temp_addr_taken (memloc);
6982 emit_move_insn (memloc, op0);
6983 op0 = memloc;
6984 }
6985
6986 if (GET_CODE (op0) != MEM)
6987 abort ();
6988
6989 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6990 {
6991 temp = XEXP (op0, 0);
6992 #ifdef POINTERS_EXTEND_UNSIGNED
6993 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6994 && mode == ptr_mode)
6995 temp = convert_memory_address (ptr_mode, temp);
6996 #endif
6997 return temp;
6998 }
6999
7000 op0 = force_operand (XEXP (op0, 0), target);
7001 }
7002
7003 if (flag_force_addr && GET_CODE (op0) != REG)
7004 op0 = force_reg (Pmode, op0);
7005
7006 if (GET_CODE (op0) == REG
7007 && ! REG_USERVAR_P (op0))
7008 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7009
7010 /* If we might have had a temp slot, add an equivalent address
7011 for it. */
7012 if (temp != 0)
7013 update_temp_slot_address (temp, op0);
7014
7015 #ifdef POINTERS_EXTEND_UNSIGNED
7016 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7017 && mode == ptr_mode)
7018 op0 = convert_memory_address (ptr_mode, op0);
7019 #endif
7020
7021 return op0;
7022
7023 case ENTRY_VALUE_EXPR:
7024 abort ();
7025
7026 /* COMPLEX type for Extended Pascal & Fortran. */
7027 case COMPLEX_EXPR:
7028 {
7029 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7030 rtx insns;
7031
7032 /* Get the rtx for the operands. */
7033 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7034 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7035
7036 if (! target)
7037 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7038
7039 start_sequence ();
7040
7041 /* Move the real (op0) and imaginary (op1) parts to their location. */
7042 emit_move_insn (gen_realpart (mode, target), op0);
7043 emit_move_insn (gen_imagpart (mode, target), op1);
7044
7045 insns = get_insns ();
7046 end_sequence ();
7047
7048 /* Complex construction should appear as a single unit. */
7049 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7050 each with a separate pseudo as destination.
7051 It's not correct for flow to treat them as a unit. */
7052 if (GET_CODE (target) != CONCAT)
7053 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7054 else
7055 emit_insns (insns);
7056
7057 return target;
7058 }
7059
7060 case REALPART_EXPR:
7061 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7062 return gen_realpart (mode, op0);
7063
7064 case IMAGPART_EXPR:
7065 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7066 return gen_imagpart (mode, op0);
7067
7068 case CONJ_EXPR:
7069 {
7070 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7071 rtx imag_t;
7072 rtx insns;
7073
7074 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7075
7076 if (! target)
7077 target = gen_reg_rtx (mode);
7078
7079 start_sequence ();
7080
7081 /* Store the realpart and the negated imagpart to target. */
7082 emit_move_insn (gen_realpart (partmode, target),
7083 gen_realpart (partmode, op0));
7084
7085 imag_t = gen_imagpart (partmode, target);
7086 temp = expand_unop (partmode, neg_optab,
7087 gen_imagpart (partmode, op0), imag_t, 0);
7088 if (temp != imag_t)
7089 emit_move_insn (imag_t, temp);
7090
7091 insns = get_insns ();
7092 end_sequence ();
7093
7094 /* Conjugate should appear as a single unit.
7095 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7096 each with a separate pseudo as destination.
7097 It's not correct for flow to treat them as a unit. */
7098 if (GET_CODE (target) != CONCAT)
7099 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7100 else
7101 emit_insns (insns);
7102
7103 return target;
7104 }
7105
7106 case TRY_CATCH_EXPR:
7107 {
7108 tree handler = TREE_OPERAND (exp, 1);
7109
7110 expand_eh_region_start ();
7111
7112 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7113
7114 expand_eh_region_end (handler);
7115
7116 return op0;
7117 }
7118
7119 case POPDCC_EXPR:
7120 {
7121 rtx dcc = get_dynamic_cleanup_chain ();
7122 emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
7123 return const0_rtx;
7124 }
7125
7126 case POPDHC_EXPR:
7127 {
7128 rtx dhc = get_dynamic_handler_chain ();
7129 emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
7130 return const0_rtx;
7131 }
7132
7133 case ERROR_MARK:
7134 op0 = CONST0_RTX (tmode);
7135 if (op0 != 0)
7136 return op0;
7137 return const0_rtx;
7138
7139 default:
7140 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7141 }
7142
7143 /* Here to do an ordinary binary operator, generating an instruction
7144 from the optab already placed in `this_optab'. */
7145 binop:
7146 preexpand_calls (exp);
7147 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7148 subtarget = 0;
7149 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7150 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7151 binop2:
7152 temp = expand_binop (mode, this_optab, op0, op1, target,
7153 unsignedp, OPTAB_LIB_WIDEN);
7154 if (temp == 0)
7155 abort ();
7156 return temp;
7157 }
7158
7159
7160 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7161
7162 void
7163 bc_expand_expr (exp)
7164 tree exp;
7165 {
7166 enum tree_code code;
7167 tree type, arg0;
7168 rtx r;
7169 struct binary_operator *binoptab;
7170 struct unary_operator *unoptab;
7171 struct increment_operator *incroptab;
7172 struct bc_label *lab, *lab1;
7173 enum bytecode_opcode opcode;
7174
7175
7176 code = TREE_CODE (exp);
7177
7178 switch (code)
7179 {
7180 case PARM_DECL:
7181
7182 if (DECL_RTL (exp) == 0)
7183 {
7184 error_with_decl (exp, "prior parameter's size depends on `%s'");
7185 return;
7186 }
7187
7188 bc_load_parmaddr (DECL_RTL (exp));
7189 bc_load_memory (TREE_TYPE (exp), exp);
7190
7191 return;
7192
7193 case VAR_DECL:
7194
7195 if (DECL_RTL (exp) == 0)
7196 abort ();
7197
7198 #if 0
7199 if (BYTECODE_LABEL (DECL_RTL (exp)))
7200 bc_load_externaddr (DECL_RTL (exp));
7201 else
7202 bc_load_localaddr (DECL_RTL (exp));
7203 #endif
7204 if (TREE_PUBLIC (exp))
7205 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7206 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7207 else
7208 bc_load_localaddr (DECL_RTL (exp));
7209
7210 bc_load_memory (TREE_TYPE (exp), exp);
7211 return;
7212
7213 case INTEGER_CST:
7214
7215 #ifdef DEBUG_PRINT_CODE
7216 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7217 #endif
7218 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7219 ? SImode
7220 : TYPE_MODE (TREE_TYPE (exp)))],
7221 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7222 return;
7223
7224 case REAL_CST:
7225
7226 #if 0
7227 #ifdef DEBUG_PRINT_CODE
7228 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7229 #endif
7230 /* FIX THIS: find a better way to pass real_cst's. -bson */
7231 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7232 (double) TREE_REAL_CST (exp));
7233 #else
7234 abort ();
7235 #endif
7236
7237 return;
7238
7239 case CALL_EXPR:
7240
7241 /* We build a call description vector describing the type of
7242 the return value and of the arguments; this call vector,
7243 together with a pointer to a location for the return value
7244 and the base of the argument list, is passed to the low
7245 level machine dependent call subroutine, which is responsible
7246 for putting the arguments wherever real functions expect
7247 them, as well as getting the return value back. */
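      /* A sketch of the vector laid out below:
	    { nargs, ret_type, ret_size, arg1_type, arg1_size, ... },
	 with the argument entries in source order.  */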
7248 {
7249 tree calldesc = 0, arg;
7250 int nargs = 0, i;
7251 rtx retval;
7252
7253 /* Push the evaluated args on the evaluation stack in reverse
7254 order. Also make an entry for each arg in the calldesc
7255 vector while we're at it. */
7256
7257 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7258
7259 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7260 {
7261 ++nargs;
7262 bc_expand_expr (TREE_VALUE (arg));
7263
7264 calldesc = tree_cons ((tree) 0,
7265 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7266 calldesc);
7267 calldesc = tree_cons ((tree) 0,
7268 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7269 calldesc);
7270 }
7271
7272 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7273
7274 /* Allocate a location for the return value and push its
7275 address on the evaluation stack. Also make an entry
7276 at the front of the calldesc for the return value type. */
7277
7278 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7279 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7280 bc_load_localaddr (retval);
7281
7282 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7283 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7284
7285 /* Prepend the argument count. */
7286 calldesc = tree_cons ((tree) 0,
7287 build_int_2 (nargs, 0),
7288 calldesc);
7289
7290 /* Push the address of the call description vector on the stack. */
7291 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7292 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7293 build_index_type (build_int_2 (nargs * 2, 0)));
7294 r = output_constant_def (calldesc);
7295 bc_load_externaddr (r);
7296
7297 /* Push the address of the function to be called. */
7298 bc_expand_expr (TREE_OPERAND (exp, 0));
7299
7300 /* Call the function, popping its address and the calldesc vector
7301 address off the evaluation stack in the process. */
7302 bc_emit_instruction (call);
7303
7304 /* Pop the arguments off the stack. */
7305 bc_adjust_stack (nargs);
7306
7307 /* Load the return value onto the stack. */
7308 bc_load_localaddr (retval);
7309 bc_load_memory (type, TREE_OPERAND (exp, 0));
7310 }
7311 return;
7312
7313 case SAVE_EXPR:
7314
7315 if (!SAVE_EXPR_RTL (exp))
7316 {
7317 /* First time around: copy to local variable. */
7318 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7319 TYPE_ALIGN (TREE_TYPE(exp)));
7320 bc_expand_expr (TREE_OPERAND (exp, 0));
7321 bc_emit_instruction (duplicate);
7322
7323 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7324 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7325 }
7326 else
7327 {
7328 /* Consecutive reference: use saved copy. */
7329 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7330 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7331 }
7332 return;
7333
7334 #if 0
7335 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7336 how are they handled instead? */
7337 case LET_STMT:
7338
7339 TREE_USED (exp) = 1;
7340 bc_expand_expr (STMT_BODY (exp));
7341 return;
7342 #endif
7343
7344 case NOP_EXPR:
7345 case CONVERT_EXPR:
7346
7347 bc_expand_expr (TREE_OPERAND (exp, 0));
7348 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7349 return;
7350
7351 case MODIFY_EXPR:
7352
7353 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7354 return;
7355
7356 case ADDR_EXPR:
7357
7358 bc_expand_address (TREE_OPERAND (exp, 0));
7359 return;
7360
7361 case INDIRECT_REF:
7362
7363 bc_expand_expr (TREE_OPERAND (exp, 0));
7364 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7365 return;
7366
7367 case ARRAY_REF:
7368
7369 bc_expand_expr (bc_canonicalize_array_ref (exp));
7370 return;
7371
7372 case COMPONENT_REF:
7373
7374 bc_expand_component_address (exp);
7375
7376 /* If we have a bitfield, generate a proper load. */
7377 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7378 return;
7379
7380 case COMPOUND_EXPR:
7381
7382 bc_expand_expr (TREE_OPERAND (exp, 0));
7383 bc_emit_instruction (drop);
7384 bc_expand_expr (TREE_OPERAND (exp, 1));
7385 return;
7386
7387 case COND_EXPR:
7388
7389 bc_expand_expr (TREE_OPERAND (exp, 0));
7390 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7391 lab = bc_get_bytecode_label ();
7392 bc_emit_bytecode (xjumpifnot);
7393 bc_emit_bytecode_labelref (lab);
7394
7395 #ifdef DEBUG_PRINT_CODE
7396 fputc ('\n', stderr);
7397 #endif
7398 bc_expand_expr (TREE_OPERAND (exp, 1));
7399 lab1 = bc_get_bytecode_label ();
7400 bc_emit_bytecode (jump);
7401 bc_emit_bytecode_labelref (lab1);
7402
7403 #ifdef DEBUG_PRINT_CODE
7404 fputc ('\n', stderr);
7405 #endif
7406
7407 bc_emit_bytecode_labeldef (lab);
7408 bc_expand_expr (TREE_OPERAND (exp, 2));
7409 bc_emit_bytecode_labeldef (lab1);
7410 return;
7411
7412 case TRUTH_ANDIF_EXPR:
7413
7414 opcode = xjumpifnot;
7415 goto andorif;
7416
7417 case TRUTH_ORIF_EXPR:
7418
7419 opcode = xjumpif;
7420 goto andorif;
7421
7422 case PLUS_EXPR:
7423
7424 binoptab = optab_plus_expr;
7425 goto binop;
7426
7427 case MINUS_EXPR:
7428
7429 binoptab = optab_minus_expr;
7430 goto binop;
7431
7432 case MULT_EXPR:
7433
7434 binoptab = optab_mult_expr;
7435 goto binop;
7436
7437 case TRUNC_DIV_EXPR:
7438 case FLOOR_DIV_EXPR:
7439 case CEIL_DIV_EXPR:
7440 case ROUND_DIV_EXPR:
7441 case EXACT_DIV_EXPR:
7442
7443 binoptab = optab_trunc_div_expr;
7444 goto binop;
7445
7446 case TRUNC_MOD_EXPR:
7447 case FLOOR_MOD_EXPR:
7448 case CEIL_MOD_EXPR:
7449 case ROUND_MOD_EXPR:
7450
7451 binoptab = optab_trunc_mod_expr;
7452 goto binop;
7453
7454 case FIX_ROUND_EXPR:
7455 case FIX_FLOOR_EXPR:
7456 case FIX_CEIL_EXPR:
7457 abort (); /* Not used for C. */
7458
7459 case FIX_TRUNC_EXPR:
7460 case FLOAT_EXPR:
7461 case MAX_EXPR:
7462 case MIN_EXPR:
7463 case FFS_EXPR:
7464 case LROTATE_EXPR:
7465 case RROTATE_EXPR:
7466 abort (); /* FIXME */
7467
7468 case RDIV_EXPR:
7469
7470 binoptab = optab_rdiv_expr;
7471 goto binop;
7472
7473 case BIT_AND_EXPR:
7474
7475 binoptab = optab_bit_and_expr;
7476 goto binop;
7477
7478 case BIT_IOR_EXPR:
7479
7480 binoptab = optab_bit_ior_expr;
7481 goto binop;
7482
7483 case BIT_XOR_EXPR:
7484
7485 binoptab = optab_bit_xor_expr;
7486 goto binop;
7487
7488 case LSHIFT_EXPR:
7489
7490 binoptab = optab_lshift_expr;
7491 goto binop;
7492
7493 case RSHIFT_EXPR:
7494
7495 binoptab = optab_rshift_expr;
7496 goto binop;
7497
7498 case TRUTH_AND_EXPR:
7499
7500 binoptab = optab_truth_and_expr;
7501 goto binop;
7502
7503 case TRUTH_OR_EXPR:
7504
7505 binoptab = optab_truth_or_expr;
7506 goto binop;
7507
7508 case LT_EXPR:
7509
7510 binoptab = optab_lt_expr;
7511 goto binop;
7512
7513 case LE_EXPR:
7514
7515 binoptab = optab_le_expr;
7516 goto binop;
7517
7518 case GE_EXPR:
7519
7520 binoptab = optab_ge_expr;
7521 goto binop;
7522
7523 case GT_EXPR:
7524
7525 binoptab = optab_gt_expr;
7526 goto binop;
7527
7528 case EQ_EXPR:
7529
7530 binoptab = optab_eq_expr;
7531 goto binop;
7532
7533 case NE_EXPR:
7534
7535 binoptab = optab_ne_expr;
7536 goto binop;
7537
7538 case NEGATE_EXPR:
7539
7540 unoptab = optab_negate_expr;
7541 goto unop;
7542
7543 case BIT_NOT_EXPR:
7544
7545 unoptab = optab_bit_not_expr;
7546 goto unop;
7547
7548 case TRUTH_NOT_EXPR:
7549
7550 unoptab = optab_truth_not_expr;
7551 goto unop;
7552
7553 case PREDECREMENT_EXPR:
7554
7555 incroptab = optab_predecrement_expr;
7556 goto increment;
7557
7558 case PREINCREMENT_EXPR:
7559
7560 incroptab = optab_preincrement_expr;
7561 goto increment;
7562
7563 case POSTDECREMENT_EXPR:
7564
7565 incroptab = optab_postdecrement_expr;
7566 goto increment;
7567
7568 case POSTINCREMENT_EXPR:
7569
7570 incroptab = optab_postincrement_expr;
7571 goto increment;
7572
7573 case CONSTRUCTOR:
7574
7575 bc_expand_constructor (exp);
7576 return;
7577
7578 case ERROR_MARK:
7579 case RTL_EXPR:
7580
7581 return;
7582
7583 case BIND_EXPR:
7584 {
7585 tree vars = TREE_OPERAND (exp, 0);
7586 int vars_need_expansion = 0;
7587
7588 /* Need to open a binding contour here because
7589 if there are any cleanups they must be contained here. */
7590 expand_start_bindings (0);
7591
7592 /* Mark the corresponding BLOCK for output. */
7593 if (TREE_OPERAND (exp, 2) != 0)
7594 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7595
7596 /* If VARS have not yet been expanded, expand them now. */
7597 while (vars)
7598 {
7599 if (DECL_RTL (vars) == 0)
7600 {
7601 vars_need_expansion = 1;
7602 expand_decl (vars);
7603 }
7604 expand_decl_init (vars);
7605 vars = TREE_CHAIN (vars);
7606 }
7607
7608 bc_expand_expr (TREE_OPERAND (exp, 1));
7609
7610 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7611
7612 return;
7613 }
7614 }
7615
7616 abort ();
7617
7618 binop:
7619
7620 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7621 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7622 return;
7623
7624
7625 unop:
7626
7627 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7628 return;
7629
7630
7631 andorif:
7632
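  /* Short-circuit sketch: evaluate op0 as a truth value and duplicate
     it; the conditional jump consumes one copy and skips op1 when that
     copy already decides the result (nonzero for ORIF, zero for ANDIF),
     leaving the duplicate as the value.  Otherwise drop the duplicate
     and evaluate op1.  */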
7633 bc_expand_expr (TREE_OPERAND (exp, 0));
7634 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7635 lab = bc_get_bytecode_label ();
7636
7637 bc_emit_instruction (duplicate);
7638 bc_emit_bytecode (opcode);
7639 bc_emit_bytecode_labelref (lab);
7640
7641 #ifdef DEBUG_PRINT_CODE
7642 fputc ('\n', stderr);
7643 #endif
7644
7645 bc_emit_instruction (drop);
7646
7647 bc_expand_expr (TREE_OPERAND (exp, 1));
7648 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7649 bc_emit_bytecode_labeldef (lab);
7650 return;
7651
7652
7653 increment:
7654
7655 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7656
7657 /* Push the quantum. */
7658 bc_expand_expr (TREE_OPERAND (exp, 1));
7659
7660 /* Convert it to the lvalue's type. */
7661 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7662
7663 /* Push the address of the lvalue. */
7664 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7665
7666 /* Perform actual increment. */
7667 bc_expand_increment (incroptab, type);
7668 return;
7669 }
7670 \f
7671 /* Return the alignment in bits of EXP, a pointer valued expression.
7672 But don't return more than MAX_ALIGN no matter what.
7673 The alignment returned is, by default, the alignment of the thing that
7674 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7675
7676 Otherwise, look at the expression to see if we can do better, i.e., if the
7677 expression is actually pointing at an object whose alignment is tighter. */
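   /* E.g. (a sketch of the cases handled below): for a pointer plus a
      constant byte offset of 2, no more than 16-bit alignment can be
      assumed, however aligned the base pointer is; for &DECL we can
      use DECL_ALIGN.  */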
7678
7679 static int
7680 get_pointer_alignment (exp, max_align)
7681 tree exp;
7682 unsigned max_align;
7683 {
7684 unsigned align, inner;
7685
7686 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7687 return 0;
7688
7689 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7690 align = MIN (align, max_align);
7691
7692 while (1)
7693 {
7694 switch (TREE_CODE (exp))
7695 {
7696 case NOP_EXPR:
7697 case CONVERT_EXPR:
7698 case NON_LVALUE_EXPR:
7699 exp = TREE_OPERAND (exp, 0);
7700 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7701 return align;
7702 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7703 align = MIN (inner, max_align);
7704 break;
7705
7706 case PLUS_EXPR:
7707 /* If sum of pointer + int, restrict our maximum alignment to that
7708 imposed by the integer. If not, we can't do any better than
7709 ALIGN. */
7710 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7711 return align;
7712
7713 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7714 & (max_align - 1))
7715 != 0)
7716 max_align >>= 1;
7717
7718 exp = TREE_OPERAND (exp, 0);
7719 break;
7720
7721 case ADDR_EXPR:
7722 /* See what we are pointing at and look at its alignment. */
7723 exp = TREE_OPERAND (exp, 0);
7724 if (TREE_CODE (exp) == FUNCTION_DECL)
7725 align = FUNCTION_BOUNDARY;
7726 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7727 align = DECL_ALIGN (exp);
7728 #ifdef CONSTANT_ALIGNMENT
7729 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7730 align = CONSTANT_ALIGNMENT (exp, align);
7731 #endif
7732 return MIN (align, max_align);
7733
7734 default:
7735 return align;
7736 }
7737 }
7738 }
7739 \f
7740 /* Return the tree node and offset if a given argument corresponds to
7741 a string constant. */
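   /* E.g., for the C expression "hello" + 4, ARG is a PLUS_EXPR whose
      first operand is an ADDR_EXPR of a STRING_CST; we return the
      STRING_CST "hello" and set *PTR_OFFSET to 4.  */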
7742
7743 static tree
7744 string_constant (arg, ptr_offset)
7745 tree arg;
7746 tree *ptr_offset;
7747 {
7748 STRIP_NOPS (arg);
7749
7750 if (TREE_CODE (arg) == ADDR_EXPR
7751 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7752 {
7753 *ptr_offset = integer_zero_node;
7754 return TREE_OPERAND (arg, 0);
7755 }
7756 else if (TREE_CODE (arg) == PLUS_EXPR)
7757 {
7758 tree arg0 = TREE_OPERAND (arg, 0);
7759 tree arg1 = TREE_OPERAND (arg, 1);
7760
7761 STRIP_NOPS (arg0);
7762 STRIP_NOPS (arg1);
7763
7764 if (TREE_CODE (arg0) == ADDR_EXPR
7765 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7766 {
7767 *ptr_offset = arg1;
7768 return TREE_OPERAND (arg0, 0);
7769 }
7770 else if (TREE_CODE (arg1) == ADDR_EXPR
7771 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7772 {
7773 *ptr_offset = arg0;
7774 return TREE_OPERAND (arg1, 0);
7775 }
7776 }
7777
7778 return 0;
7779 }
7780
7781 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7782 way, because the array could contain a zero byte in the middle.
7783 TREE_STRING_LENGTH is the size of the character array, not the string.
7784
7785 Unfortunately, string_constant can't access the values of const char
7786 arrays with initializers, so neither can we here. */
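   /* E.g., c_strlen ("hello" + 2) folds to 3; for "foo\0bar" plus a
      variable offset we must return 0 (unknown), since the offset may
      land before or after the embedded null.  */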
7787
7788 static tree
7789 c_strlen (src)
7790 tree src;
7791 {
7792 tree offset_node;
7793 int offset, max;
7794 char *ptr;
7795
7796 src = string_constant (src, &offset_node);
7797 if (src == 0)
7798 return 0;
7799 max = TREE_STRING_LENGTH (src);
7800 ptr = TREE_STRING_POINTER (src);
7801 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7802 {
7803 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7804 compute the offset to the following null if we don't know where to
7805 start searching for it. */
7806 int i;
7807 for (i = 0; i < max; i++)
7808 if (ptr[i] == 0)
7809 return 0;
7810 /* We don't know the starting offset, but we do know that the string
7811 has no internal zero bytes. We can assume that the offset falls
7812 within the bounds of the string; otherwise, the programmer deserves
7813 what he gets. Subtract the offset from the length of the string,
7814 and return that. */
7815 /* This would perhaps not be valid if we were dealing with named
7816 arrays in addition to literal string constants. */
7817 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7818 }
7819
7820 /* We have a known offset into the string. Start searching there for
7821 a null character. */
7822 if (offset_node == 0)
7823 offset = 0;
7824 else
7825 {
7826 /* Did we get a long long offset? If so, punt. */
7827 if (TREE_INT_CST_HIGH (offset_node) != 0)
7828 return 0;
7829 offset = TREE_INT_CST_LOW (offset_node);
7830 }
7831 /* If the offset is known to be out of bounds, warn, and call strlen at
7832 runtime. */
7833 if (offset < 0 || offset > max)
7834 {
7835 warning ("offset outside bounds of constant string");
7836 return 0;
7837 }
7838 /* Use strlen to search for the first zero byte. Since any strings
7839 constructed with build_string will have nulls appended, we win even
7840 if we get handed something like (char[4])"abcd".
7841
7842 Since OFFSET is our starting index into the string, no further
7843 calculation is needed. */
7844 return size_int (strlen (ptr + offset));
7845 }
7846
7847 rtx
7848 expand_builtin_return_addr (fndecl_code, count, tem)
7849 enum built_in_function fndecl_code;
7850 int count;
7851 rtx tem;
7852 {
7853 int i;
7854
7855 /* Some machines need special handling before we can access
7856 arbitrary frames. For example, on the sparc, we must first flush
7857 all register windows to the stack. */
7858 #ifdef SETUP_FRAME_ADDRESSES
7859 SETUP_FRAME_ADDRESSES ();
7860 #endif
7861
7862 /* On the sparc, the return address is not in the frame, it is in a
7863 register. There is no way to access it off of the current frame
7864 pointer, but it can be accessed off the previous frame pointer by
7865 reading the value from the register window save area. */
7866 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7867 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7868 count--;
7869 #endif
7870
7871 /* Scan back COUNT frames to the specified frame. */
7872 for (i = 0; i < count; i++)
7873 {
7874 /* Assume the dynamic chain pointer is in the word that the
7875 frame address points to, unless otherwise specified. */
7876 #ifdef DYNAMIC_CHAIN_ADDRESS
7877 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7878 #endif
7879 tem = memory_address (Pmode, tem);
7880 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7881 }
7882
7883 /* For __builtin_frame_address, return what we've got. */
7884 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7885 return tem;
7886
7887 /* For __builtin_return_address, get the return address from that
7888 frame. */
7889 #ifdef RETURN_ADDR_RTX
7890 tem = RETURN_ADDR_RTX (count, tem);
7891 #else
7892 tem = memory_address (Pmode,
7893 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7894 tem = gen_rtx (MEM, Pmode, tem);
7895 #endif
7896 return tem;
7897 }
7898
7899 /* __builtin_setjmp is passed a pointer to an array of five words (not
7900 all will be used on all machines). It operates similarly to the C
7901 library function of the same name, but is more efficient. Much of
7902 the code below (and for longjmp) is copied from the handling of
7903 non-local gotos.
7904
7905 NOTE: This is intended for use by GNAT and the exception handling
7906 scheme in the compiler and will only work with the method used by
7907 them. */
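/* A usage sketch (only valid under the scheme described above; BUF is
   hypothetical):

	void *buf[5];
	if (__builtin_setjmp (buf) == 0)
	  ... normal path, which may call __builtin_longjmp (buf, 1) ...
	else
	  ... control resumes here and the call returns 1 ...  */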
7908
7909 rtx
7910 expand_builtin_setjmp (buf_addr, target)
7911 rtx buf_addr;
7912 rtx target;
7913 {
7914 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
7915 enum machine_mode sa_mode = Pmode, value_mode;
7916 rtx stack_save;
7917 int old_inhibit_defer_pop = inhibit_defer_pop;
7918 int return_pops
7919 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
7920 build_function_type (void_type_node, NULL_TREE),
7921 0);
7922 rtx next_arg_reg;
7923 CUMULATIVE_ARGS args_so_far;
7924 rtx op0;
7925 int i;
7926
7927 value_mode = TYPE_MODE (integer_type_node);
7928
7929 #ifdef POINTERS_EXTEND_UNSIGNED
7930 buf_addr = convert_memory_address (Pmode, buf_addr);
7931 #endif
7932
7933 buf_addr = force_reg (Pmode, buf_addr);
7934
7935 if (target == 0 || GET_CODE (target) != REG
7936 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7937 target = gen_reg_rtx (value_mode);
7938
7939 emit_queue ();
7940
7941 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
7942 current_function_calls_setjmp = 1;
7943
7944 /* We store the frame pointer and the address of lab1 in the buffer
7945 and use the rest of it for the stack save area, which is
7946 machine-dependent. */
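  /* Resulting buffer layout: word 0 holds the frame pointer, word 1
     the address of LAB1, and words 2 and up the machine-dependent
     stack save area.  */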
7947 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
7948 virtual_stack_vars_rtx);
7949 emit_move_insn
7950 (validize_mem (gen_rtx (MEM, Pmode,
7951 plus_constant (buf_addr,
7952 GET_MODE_SIZE (Pmode)))),
7953 gen_rtx (LABEL_REF, Pmode, lab1));
7954
7955 #ifdef HAVE_save_stack_nonlocal
7956 if (HAVE_save_stack_nonlocal)
7957 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
7958 #endif
7959
7960 stack_save = gen_rtx (MEM, sa_mode,
7961 plus_constant (buf_addr,
7962 2 * GET_MODE_SIZE (Pmode)));
7963 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
7964
7965 #ifdef HAVE_setjmp
7966 if (HAVE_setjmp)
7967 emit_insn (gen_setjmp ());
7968 #endif
7969
7970 /* Set TARGET to zero and branch around the other case. */
7971 emit_move_insn (target, const0_rtx);
7972 emit_jump_insn (gen_jump (lab2));
7973 emit_barrier ();
7974 emit_label (lab1);
7975
7976 /* Note that setjmp clobbers FP when we get here, so we have to make
7977 sure it's marked as used by this function. */
7978 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
7979
7980 /* Mark the static chain as clobbered here so life information
7981 doesn't get messed up for it. */
7982 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
7983
7984 /* Now put in the code to restore the frame pointer, and argument
7985 pointer, if needed. The code below is from expand_end_bindings
7986 in stmt.c; see detailed documentation there. */
7987 #ifdef HAVE_nonlocal_goto
7988 if (! HAVE_nonlocal_goto)
7989 #endif
7990 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
7991
7992 /* Do we need to do something like:
7993
7994 current_function_has_nonlocal_label = 1;
7995
7996 here? It seems like we might have to, or some subset of that
7997 functionality, but I am unsure. (mrs) */
7998
7999 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8000 if (fixed_regs[ARG_POINTER_REGNUM])
8001 {
8002 #ifdef ELIMINABLE_REGS
8003 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8004
8005 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8006 if (elim_regs[i].from == ARG_POINTER_REGNUM
8007 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8008 break;
8009
8010 if (i == sizeof elim_regs / sizeof elim_regs [0])
8011 #endif
8012 {
8013 /* Now restore our arg pointer from the address at which it
8014 was saved in our stack frame.
8015 If there hasn't been space allocated for it yet, make
8016 some now. */
8017 if (arg_pointer_save_area == 0)
8018 arg_pointer_save_area
8019 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8020 emit_move_insn (virtual_incoming_args_rtx,
8021 copy_to_reg (arg_pointer_save_area));
8022 }
8023 }
8024 #endif
8025
8026 #ifdef HAVE_nonlocal_goto_receiver
8027 if (HAVE_nonlocal_goto_receiver)
8028 emit_insn (gen_nonlocal_goto_receiver ());
8029 #endif
8030 /* The static chain pointer contains the address of the dummy function.
8031 We need to call it here to handle some PIC cases of restoring a
8032 global pointer. Then return 1. */
8033 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8034
8035 /* We can't actually call emit_library_call here, so do everything
8036 it does, which isn't much for a libfunc with no args. */
8037 op0 = memory_address (FUNCTION_MODE, op0);
8038
8039 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8040 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8041 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8042
8043 #ifndef ACCUMULATE_OUTGOING_ARGS
8044 #ifdef HAVE_call_pop
8045 if (HAVE_call_pop)
8046 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8047 const0_rtx, next_arg_reg,
8048 GEN_INT (return_pops)));
8049 else
8050 #endif
8051 #endif
8052
8053 #ifdef HAVE_call
8054 if (HAVE_call)
8055 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8056 const0_rtx, next_arg_reg, const0_rtx));
8057 else
8058 #endif
8059 abort ();
8060
8061 emit_move_insn (target, const1_rtx);
8062 emit_label (lab2);
8063 return target;
8064 }
8065
8066 \f
8067 /* Expand an expression EXP that calls a built-in function,
8068 with result going to TARGET if that's convenient
8069 (and in mode MODE if that's convenient).
8070 SUBTARGET may be used as the target for computing one of EXP's operands.
8071 IGNORE is nonzero if the value is to be ignored. */
8072
8073 #define CALLED_AS_BUILT_IN(NODE) \
8074 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8075
8076 static rtx
8077 expand_builtin (exp, target, subtarget, mode, ignore)
8078 tree exp;
8079 rtx target;
8080 rtx subtarget;
8081 enum machine_mode mode;
8082 int ignore;
8083 {
8084 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8085 tree arglist = TREE_OPERAND (exp, 1);
8086 rtx op0;
8087 rtx lab1, insns;
8088 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8089 optab builtin_optab;
8090
8091 switch (DECL_FUNCTION_CODE (fndecl))
8092 {
8093 case BUILT_IN_ABS:
8094 case BUILT_IN_LABS:
8095 case BUILT_IN_FABS:
8096 /* build_function_call changes these into ABS_EXPR. */
8097 abort ();
8098
8099 case BUILT_IN_SIN:
8100 case BUILT_IN_COS:
8101 /* Treat these like sqrt, but only if the user asks for them. */
8102 if (! flag_fast_math)
8103 break;
8104 case BUILT_IN_FSQRT:
8105 /* If not optimizing, call the library function. */
8106 if (! optimize)
8107 break;
8108
8109 if (arglist == 0
8110 /* Arg could be wrong type if user redeclared this fcn wrong. */
8111 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8112 break;
8113
8114 /* Stabilize and compute the argument. */
8115 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8116 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8117 {
8118 exp = copy_node (exp);
8119 arglist = copy_node (arglist);
8120 TREE_OPERAND (exp, 1) = arglist;
8121 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8122 }
8123 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8124
8125 /* Make a suitable register to place result in. */
8126 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8127
8128 emit_queue ();
8129 start_sequence ();
8130
8131 switch (DECL_FUNCTION_CODE (fndecl))
8132 {
8133 case BUILT_IN_SIN:
8134 builtin_optab = sin_optab; break;
8135 case BUILT_IN_COS:
8136 builtin_optab = cos_optab; break;
8137 case BUILT_IN_FSQRT:
8138 builtin_optab = sqrt_optab; break;
8139 default:
8140 abort ();
8141 }
8142
8143 /* Compute into TARGET.
8144 Set TARGET to wherever the result comes back. */
8145 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8146 builtin_optab, op0, target, 0);
8147
8148 /* If we were unable to expand via the builtin, stop the
8149 sequence (without outputting the insns) and break, causing
8150 a call to the library function. */
8151 if (target == 0)
8152 {
8153 end_sequence ();
8154 break;
8155 }
8156
8157 /* Check the results by default. But if flag_fast_math is turned on,
8158 then assume sqrt will always be called with valid arguments. */
8159
8160 if (! flag_fast_math)
8161 {
8162 /* Don't define the builtin FP instructions
8163 if your machine is not IEEE. */
8164 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8165 abort ();
8166
8167 lab1 = gen_label_rtx ();
8168
8169 /* Test the result; if it is NaN, set errno=EDOM because
8170 the argument was not in the domain. */
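	  /* (Under IEEE arithmetic, TARGET == TARGET compares false
	     only when TARGET is a NaN, so the branch below skips the
	     errno store for every ordinary result.)  */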
8171 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8172 emit_jump_insn (gen_beq (lab1));
8173
8174 #ifdef TARGET_EDOM
8175 {
8176 #ifdef GEN_ERRNO_RTX
8177 rtx errno_rtx = GEN_ERRNO_RTX;
8178 #else
8179 rtx errno_rtx
8180 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8181 #endif
8182
8183 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8184 }
8185 #else
8186 /* We can't set errno=EDOM directly; let the library call do it.
8187 Pop the arguments right away in case the call gets deleted. */
8188 NO_DEFER_POP;
8189 expand_call (exp, target, 0);
8190 OK_DEFER_POP;
8191 #endif
8192
8193 emit_label (lab1);
8194 }
8195
8196 /* Output the entire sequence. */
8197 insns = get_insns ();
8198 end_sequence ();
8199 emit_insns (insns);
8200
8201 return target;
8202
8203 /* __builtin_apply_args returns a block of memory allocated on
8204 the stack into which are stored the arg pointer, structure
8205 value address, static chain, and all the registers that might
8206 possibly be used in performing a function call. The code is
8207 moved to the start of the function so the incoming values are
8208 saved. */
8209 case BUILT_IN_APPLY_ARGS:
8210 /* Don't do __builtin_apply_args more than once in a function.
8211 Save the result of the first call and reuse it. */
8212 if (apply_args_value != 0)
8213 return apply_args_value;
8214 {
8215 /* When this function is called, it means that registers must be
8216 saved on entry to this function. So we migrate the
8217 call to the first insn of this function. */
8218 rtx temp;
8219 rtx seq;
8220
8221 start_sequence ();
8222 temp = expand_builtin_apply_args ();
8223 seq = get_insns ();
8224 end_sequence ();
8225
8226 apply_args_value = temp;
8227
8228 /* Put the sequence after the NOTE that starts the function.
8229 If this is inside a SEQUENCE, make the outer-level insn
8230 chain current, so the code is placed at the start of the
8231 function. */
8232 push_topmost_sequence ();
8233 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8234 pop_topmost_sequence ();
8235 return temp;
8236 }
8237
8238 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8239 FUNCTION with a copy of the parameters described by
8240 ARGUMENTS, and ARGSIZE. It returns a block of memory
8241 allocated on the stack into which are stored all the registers
8242 that might possibly be used for returning the result of a
8243 function. ARGUMENTS is the value returned by
8244 __builtin_apply_args. ARGSIZE is the number of bytes of
8245 arguments that must be copied. ??? How should this value be
8246 computed? We'll also need a safe worst case value for varargs
8247 functions. */
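	 /* A hedged usage sketch, where F is a hypothetical function and
	    64 a caller-chosen worst-case size of the pushed arguments:

		void *args = __builtin_apply_args ();
		void *result = __builtin_apply (f, args, 64);
		__builtin_return (result);  */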
8248 case BUILT_IN_APPLY:
8249 if (arglist == 0
8250 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8251 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8252 || TREE_CHAIN (arglist) == 0
8253 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8254 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8255 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8256 return const0_rtx;
8257 else
8258 {
8259 int i;
8260 tree t;
8261 rtx ops[3];
8262
8263 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8264 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8265
8266 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8267 }
8268
8269 /* __builtin_return (RESULT) causes the function to return the
8270 value described by RESULT. RESULT is the address of the block of
8271 memory returned by __builtin_apply. */
8272 case BUILT_IN_RETURN:
8273 if (arglist
8274 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8275 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8276 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8277 NULL_RTX, VOIDmode, 0));
8278 return const0_rtx;
8279
8280 case BUILT_IN_SAVEREGS:
8281 /* Don't do __builtin_saveregs more than once in a function.
8282 Save the result of the first call and reuse it. */
8283 if (saveregs_value != 0)
8284 return saveregs_value;
8285 {
8286 /* When this function is called, it means that registers must be
8287 saved on entry to this function. So we migrate the
8288 call to the first insn of this function. */
8289 rtx temp;
8290 rtx seq;
8291
8292 /* Now really call the function. `expand_call' does not call
8293 expand_builtin, so there is no danger of infinite recursion here. */
8294 start_sequence ();
8295
8296 #ifdef EXPAND_BUILTIN_SAVEREGS
8297 /* Do whatever the machine needs done in this case. */
8298 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8299 #else
8300 /* The register where the function returns its value
8301 is likely to have something else in it, such as an argument.
8302 So preserve that register around the call. */
8303
8304 if (value_mode != VOIDmode)
8305 {
8306 rtx valreg = hard_libcall_value (value_mode);
8307 rtx saved_valreg = gen_reg_rtx (value_mode);
8308
8309 emit_move_insn (saved_valreg, valreg);
8310 temp = expand_call (exp, target, ignore);
8311 emit_move_insn (valreg, saved_valreg);
8312 }
8313 else
8314 /* Generate the call, putting the value in a pseudo. */
8315 temp = expand_call (exp, target, ignore);
8316 #endif
8317
8318 seq = get_insns ();
8319 end_sequence ();
8320
8321 saveregs_value = temp;
8322
8323 /* Put the sequence after the NOTE that starts the function.
8324 If this is inside a SEQUENCE, make the outer-level insn
8325 chain current, so the code is placed at the start of the
8326 function. */
8327 push_topmost_sequence ();
8328 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8329 pop_topmost_sequence ();
8330 return temp;
8331 }
8332
8333 /* __builtin_args_info (N) returns word N of the arg space info
8334 for the current function. The number and meanings of words
8335 are controlled by the definition of CUMULATIVE_ARGS. */
8336 case BUILT_IN_ARGS_INFO:
8337 {
8338 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8339 int i;
8340 int *word_ptr = (int *) &current_function_args_info;
8341 tree type, elts, result;
8342
8343 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8344 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8345 __FILE__, __LINE__);
8346
8347 if (arglist != 0)
8348 {
8349 tree arg = TREE_VALUE (arglist);
8350 if (TREE_CODE (arg) != INTEGER_CST)
8351 error ("argument of `__builtin_args_info' must be constant");
8352 else
8353 {
8354 int wordnum = TREE_INT_CST_LOW (arg);
8355
8356 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8357 error ("argument of `__builtin_args_info' out of range");
8358 else
8359 return GEN_INT (word_ptr[wordnum]);
8360 }
8361 }
8362 else
8363 error ("missing argument in `__builtin_args_info'");
8364
8365 return const0_rtx;
8366
8367 #if 0
8368 for (i = 0; i < nwords; i++)
8369 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8370
8371 type = build_array_type (integer_type_node,
8372 build_index_type (build_int_2 (nwords, 0)));
8373 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8374 TREE_CONSTANT (result) = 1;
8375 TREE_STATIC (result) = 1;
8376 result = build (INDIRECT_REF, build_pointer_type (type), result);
8377 TREE_CONSTANT (result) = 1;
8378 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8379 #endif
8380 }
8381
8382 /* Return the address of the first anonymous stack arg. */
8383 case BUILT_IN_NEXT_ARG:
8384 {
8385 tree fntype = TREE_TYPE (current_function_decl);
8386
8387 if ((TYPE_ARG_TYPES (fntype) == 0
8388 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8389 == void_type_node))
8390 && ! current_function_varargs)
8391 {
8392 error ("`va_start' used in function with fixed args");
8393 return const0_rtx;
8394 }
8395
8396 if (arglist)
8397 {
8398 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8399 tree arg = TREE_VALUE (arglist);
8400
8401 /* Strip off all nops for the sake of the comparison. This
8402 is not quite the same as STRIP_NOPS. It does more.
8403 We must also strip off INDIRECT_REF for C++ reference
8404 parameters. */
8405 while (TREE_CODE (arg) == NOP_EXPR
8406 || TREE_CODE (arg) == CONVERT_EXPR
8407 || TREE_CODE (arg) == NON_LVALUE_EXPR
8408 || TREE_CODE (arg) == INDIRECT_REF)
8409 arg = TREE_OPERAND (arg, 0);
8410 if (arg != last_parm)
8411 warning ("second parameter of `va_start' not last named argument");
8412 }
8413 else if (! current_function_varargs)
8414 /* Evidently an out-of-date version of <stdarg.h>; can't validate
8415 va_start's second argument, but can still work as intended. */
8416 warning ("`__builtin_next_arg' called without an argument");
8417 }
8418
8419 return expand_binop (Pmode, add_optab,
8420 current_function_internal_arg_pointer,
8421 current_function_arg_offset_rtx,
8422 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8423
8424 case BUILT_IN_CLASSIFY_TYPE:
8425 if (arglist != 0)
8426 {
8427 tree type = TREE_TYPE (TREE_VALUE (arglist));
8428 enum tree_code code = TREE_CODE (type);
8429 if (code == VOID_TYPE)
8430 return GEN_INT (void_type_class);
8431 if (code == INTEGER_TYPE)
8432 return GEN_INT (integer_type_class);
8433 if (code == CHAR_TYPE)
8434 return GEN_INT (char_type_class);
8435 if (code == ENUMERAL_TYPE)
8436 return GEN_INT (enumeral_type_class);
8437 if (code == BOOLEAN_TYPE)
8438 return GEN_INT (boolean_type_class);
8439 if (code == POINTER_TYPE)
8440 return GEN_INT (pointer_type_class);
8441 if (code == REFERENCE_TYPE)
8442 return GEN_INT (reference_type_class);
8443 if (code == OFFSET_TYPE)
8444 return GEN_INT (offset_type_class);
8445 if (code == REAL_TYPE)
8446 return GEN_INT (real_type_class);
8447 if (code == COMPLEX_TYPE)
8448 return GEN_INT (complex_type_class);
8449 if (code == FUNCTION_TYPE)
8450 return GEN_INT (function_type_class);
8451 if (code == METHOD_TYPE)
8452 return GEN_INT (method_type_class);
8453 if (code == RECORD_TYPE)
8454 return GEN_INT (record_type_class);
8455 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8456 return GEN_INT (union_type_class);
8457 if (code == ARRAY_TYPE)
8458 {
8459 if (TYPE_STRING_FLAG (type))
8460 return GEN_INT (string_type_class);
8461 else
8462 return GEN_INT (array_type_class);
8463 }
8464 if (code == SET_TYPE)
8465 return GEN_INT (set_type_class);
8466 if (code == FILE_TYPE)
8467 return GEN_INT (file_type_class);
8468 if (code == LANG_TYPE)
8469 return GEN_INT (lang_type_class);
8470 }
8471 return GEN_INT (no_type_class);
8472
8473 case BUILT_IN_CONSTANT_P:
8474 if (arglist == 0)
8475 return const0_rtx;
8476 else
8477 {
8478 tree arg = TREE_VALUE (arglist);
8479
8480 STRIP_NOPS (arg);
8481 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8482 || (TREE_CODE (arg) == ADDR_EXPR
8483 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8484 ? const1_rtx : const0_rtx);
8485 }
8486
8487 case BUILT_IN_FRAME_ADDRESS:
8488 /* The argument must be a nonnegative integer constant.
8489 It counts the number of frames to scan up the stack.
8490 The value is the address of that frame. */
8491 case BUILT_IN_RETURN_ADDRESS:
8492 /* The argument must be a nonnegative integer constant.
8493 It counts the number of frames to scan up the stack.
8494 The value is the return address saved in that frame. */
8495 if (arglist == 0)
8496 /* Warning about missing arg was already issued. */
8497 return const0_rtx;
8498 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8499 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8500 {
8501 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8502 error ("invalid arg to `__builtin_frame_address'");
8503 else
8504 error ("invalid arg to `__builtin_return_address'");
8505 return const0_rtx;
8506 }
8507 else
8508 {
8509 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8510 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8511 hard_frame_pointer_rtx);
8512
8513 /* For __builtin_frame_address, return what we've got. */
8514 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8515 return tem;
8516
8517 if (GET_CODE (tem) != REG)
8518 tem = copy_to_reg (tem);
8519 return tem;
8520 }
8521
8522 case BUILT_IN_ALLOCA:
8523 if (arglist == 0
8524 /* Arg could be non-integer if user redeclared this fcn wrong. */
8525 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8526 break;
8527
8528 /* Compute the argument. */
8529 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8530
8531 /* Allocate the desired space. */
8532 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8533
8534 case BUILT_IN_FFS:
8535 /* If not optimizing, call the library function. */
8536 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8537 break;
8538
8539 if (arglist == 0
8540 /* Arg could be non-integer if user redeclared this fcn wrong. */
8541 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8542 break;
8543
8544 /* Compute the argument. */
8545 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8546 /* Compute ffs, into TARGET if possible.
8547 Set TARGET to wherever the result comes back. */
8548 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8549 ffs_optab, op0, target, 1);
8550 if (target == 0)
8551 abort ();
8552 return target;
8553
8554 case BUILT_IN_STRLEN:
8555 /* If not optimizing, call the library function. */
8556 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8557 break;
8558
8559 if (arglist == 0
8560 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8561 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8562 break;
8563 else
8564 {
8565 tree src = TREE_VALUE (arglist);
8566 tree len = c_strlen (src);
8567
8568 int align
8569 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8570
8571 rtx result, src_rtx, char_rtx;
8572 enum machine_mode insn_mode = value_mode, char_mode;
8573 enum insn_code icode;
8574
8575 /* If the length is known, just return it. */
8576 if (len != 0)
8577 return expand_expr (len, target, mode, 0);
8578
8579 /* If SRC is not a pointer type, don't do this operation inline. */
8580 if (align == 0)
8581 break;
8582
8583 /* Call a function if we can't compute strlen in the right mode. */
8584
8585 while (insn_mode != VOIDmode)
8586 {
8587 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8588 if (icode != CODE_FOR_nothing)
8589 break;
8590
8591 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8592 }
8593 if (insn_mode == VOIDmode)
8594 break;
8595
8596 /* Make a place to write the result of the instruction. */
8597 result = target;
8598 if (! (result != 0
8599 && GET_CODE (result) == REG
8600 && GET_MODE (result) == insn_mode
8601 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8602 result = gen_reg_rtx (insn_mode);
8603
8604 /* Make sure the operands are acceptable to the predicates. */
8605
8606 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8607 result = gen_reg_rtx (insn_mode);
8608
8609 src_rtx = memory_address (BLKmode,
8610 expand_expr (src, NULL_RTX, ptr_mode,
8611 EXPAND_NORMAL));
8612 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8613 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8614
8615 char_rtx = const0_rtx;
8616 char_mode = insn_operand_mode[(int)icode][2];
8617 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8618 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8619
8620 emit_insn (GEN_FCN (icode) (result,
8621 gen_rtx (MEM, BLKmode, src_rtx),
8622 char_rtx, GEN_INT (align)));
8623
8624 /* Return the value in the proper mode for this function. */
8625 if (GET_MODE (result) == value_mode)
8626 return result;
8627 else if (target != 0)
8628 {
8629 convert_move (target, result, 0);
8630 return target;
8631 }
8632 else
8633 return convert_to_mode (value_mode, result, 0);
8634 }
8635
8636 case BUILT_IN_STRCPY:
8637 /* If not optimizing, call the library function. */
8638 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8639 break;
8640
8641 if (arglist == 0
8642 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8643 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8644 || TREE_CHAIN (arglist) == 0
8645 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8646 break;
8647 else
8648 {
8649 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8650
8651 if (len == 0)
8652 break;
8653
8654 len = size_binop (PLUS_EXPR, len, integer_one_node);
8655
8656 chainon (arglist, build_tree_list (NULL_TREE, len));
8657 }
8658
8659 /* Drops in. */
8660 case BUILT_IN_MEMCPY:
8661 /* If not optimizing, call the library function. */
8662 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8663 break;
8664
8665 if (arglist == 0
8666 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8667 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8668 || TREE_CHAIN (arglist) == 0
8669 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8670 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8671 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8672 break;
8673 else
8674 {
8675 tree dest = TREE_VALUE (arglist);
8676 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8677 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8678 tree type;
8679
8680 int src_align
8681 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8682 int dest_align
8683 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8684 rtx dest_rtx, dest_mem, src_mem;
8685
8686 /* If either SRC or DEST is not a pointer type, don't do
8687 this operation in-line. */
8688 if (src_align == 0 || dest_align == 0)
8689 {
8690 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8691 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8692 break;
8693 }
8694
8695 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8696 dest_mem = gen_rtx (MEM, BLKmode,
8697 memory_address (BLKmode, dest_rtx));
8698 /* There could be a void* cast on top of the object. */
8699 while (TREE_CODE (dest) == NOP_EXPR)
8700 dest = TREE_OPERAND (dest, 0);
8701 type = TREE_TYPE (TREE_TYPE (dest));
8702 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8703 src_mem = gen_rtx (MEM, BLKmode,
8704 memory_address (BLKmode,
8705 expand_expr (src, NULL_RTX,
8706 ptr_mode,
8707 EXPAND_SUM)));
8708 /* There could be a void* cast on top of the object. */
8709 while (TREE_CODE (src) == NOP_EXPR)
8710 src = TREE_OPERAND (src, 0);
8711 type = TREE_TYPE (TREE_TYPE (src));
8712 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8713
8714 /* Copy word part most expediently. */
8715 emit_block_move (dest_mem, src_mem,
8716 expand_expr (len, NULL_RTX, VOIDmode, 0),
8717 MIN (src_align, dest_align));
8718 return force_operand (dest_rtx, NULL_RTX);
8719 }
8720
8721 case BUILT_IN_MEMSET:
8722 /* If not optimizing, call the library function. */
8723 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8724 break;
8725
8726 if (arglist == 0
8727 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8728 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8729 || TREE_CHAIN (arglist) == 0
8730 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8731 != INTEGER_TYPE)
8732 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8733 || (TREE_CODE (TREE_TYPE
8734 (TREE_VALUE
8735 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8736 != INTEGER_TYPE))
8737 break;
8738 else
8739 {
8740 tree dest = TREE_VALUE (arglist);
8741 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8742 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8743 tree type;
8744
8745 int dest_align
8746 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8747 rtx dest_rtx, dest_mem;
8748
8749 /* If DEST is not a pointer type, don't do this
8750 operation in-line. */
8751 if (dest_align == 0)
8752 break;
8753
8754 /* If VAL is not 0, don't do this operation in-line. */
8755 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8756 break;
8757
8758 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8759 dest_mem = gen_rtx (MEM, BLKmode,
8760 memory_address (BLKmode, dest_rtx));
8761 /* There could be a void* cast on top of the object. */
8762 while (TREE_CODE (dest) == NOP_EXPR)
8763 dest = TREE_OPERAND (dest, 0);
8764 type = TREE_TYPE (TREE_TYPE (dest));
8765 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8766
8767 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8768 dest_align);
8769
8770 return force_operand (dest_rtx, NULL_RTX);
8771 }
8772
8773 /* These comparison functions need an instruction that returns an actual
8774 index. An ordinary compare that just sets the condition codes
8775 is not enough. */
8776 #ifdef HAVE_cmpstrsi
8777 case BUILT_IN_STRCMP:
8778 /* If not optimizing, call the library function. */
8779 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8780 break;
8781
8782 if (arglist == 0
8783 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8784 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8785 || TREE_CHAIN (arglist) == 0
8786 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8787 break;
8788 else if (!HAVE_cmpstrsi)
8789 break;
8790 {
8791 tree arg1 = TREE_VALUE (arglist);
8792 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8793 tree offset;
8794 tree len, len2;
8795
8796 len = c_strlen (arg1);
8797 if (len)
8798 len = size_binop (PLUS_EXPR, integer_one_node, len);
8799 len2 = c_strlen (arg2);
8800 if (len2)
8801 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8802
8803 /* If we don't have a constant length for the first, use the length
8804 of the second, if we know it. We don't require a constant for
8805 this case; some cost analysis could be done if both are available
8806 but neither is constant. For now, assume they're equally cheap.
8807
8808 If both strings have constant lengths, use the smaller. This
8809 could arise if optimization results in strcpy being called with
8810 two fixed strings, or if the code was machine-generated. We should
8811 add some code to the `memcmp' handler below to deal with such
8812 situations, someday. */
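	 /* E.g., for strcmp (s, "ab") only LEN2 (3, counting the
	    terminating null) is known, so the cmpstrsi insn below
	    compares at most 3 bytes.  */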
8813 if (!len || TREE_CODE (len) != INTEGER_CST)
8814 {
8815 if (len2)
8816 len = len2;
8817 else if (len == 0)
8818 break;
8819 }
8820 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8821 {
8822 if (tree_int_cst_lt (len2, len))
8823 len = len2;
8824 }
8825
8826 chainon (arglist, build_tree_list (NULL_TREE, len));
8827 }
8828
8829       /* Falls through.  */
8830 case BUILT_IN_MEMCMP:
8831 /* If not optimizing, call the library function. */
8832 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8833 break;
8834
8835 if (arglist == 0
8836 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8837 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8838 || TREE_CHAIN (arglist) == 0
8839 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8840 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8841 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8842 break;
8843 else if (!HAVE_cmpstrsi)
8844 break;
8845 {
8846 tree arg1 = TREE_VALUE (arglist);
8847 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8848 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8849 rtx result;
8850
8851 int arg1_align
8852 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8853 int arg2_align
8854 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8855 enum machine_mode insn_mode
8856 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8857
8858 /* If we don't have POINTER_TYPE, call the function. */
8859 if (arg1_align == 0 || arg2_align == 0)
8860 {
8861 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8862 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8863 break;
8864 }
8865
8866 /* Make a place to write the result of the instruction. */
8867 result = target;
8868 if (! (result != 0
8869 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8870 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8871 result = gen_reg_rtx (insn_mode);
8872
8873 emit_insn (gen_cmpstrsi (result,
8874 gen_rtx (MEM, BLKmode,
8875 expand_expr (arg1, NULL_RTX,
8876 ptr_mode,
8877 EXPAND_NORMAL)),
8878 gen_rtx (MEM, BLKmode,
8879 expand_expr (arg2, NULL_RTX,
8880 ptr_mode,
8881 EXPAND_NORMAL)),
8882 expand_expr (len, NULL_RTX, VOIDmode, 0),
8883 GEN_INT (MIN (arg1_align, arg2_align))));
8884
8885 /* Return the value in the proper mode for this function. */
8886 mode = TYPE_MODE (TREE_TYPE (exp));
8887 if (GET_MODE (result) == mode)
8888 return result;
8889 else if (target != 0)
8890 {
8891 convert_move (target, result, 0);
8892 return target;
8893 }
8894 else
8895 return convert_to_mode (mode, result, 0);
8896 }
8897 #else
8898 case BUILT_IN_STRCMP:
8899 case BUILT_IN_MEMCMP:
8900 break;
8901 #endif
8902
8903 case BUILT_IN_SETJMP:
8904 if (arglist == 0
8905 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8906 break;
8907
8908 {
8909 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8910 VOIDmode, 0);
8911 return expand_builtin_setjmp (buf_addr, target);
8912 }
8913
8914 /* __builtin_longjmp is passed a pointer to an array of five words
8915 and a value, which is a dummy. It's similar to the C library longjmp
8916 function but works with __builtin_setjmp above. */
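/* An illustrative use (a sketch; do_work and recover stand for
   arbitrary user code, and the second argument to __builtin_longjmp
   is the dummy value mentioned above):

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
         do_work ();     -- normal path
       else
         recover ();     -- reached via __builtin_longjmp (buf, 1)  */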
8917 case BUILT_IN_LONGJMP:
8918 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8919 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8920 break;
8921
8922 {
8923 tree dummy_id = get_identifier ("__dummy");
8924 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8925 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8926 #ifdef POINTERS_EXTEND_UNSIGNED
8927 rtx buf_addr
8928 = force_reg (Pmode,
8929 convert_memory_address
8930 (Pmode,
8931 expand_expr (TREE_VALUE (arglist),
8932 NULL_RTX, VOIDmode, 0)));
8933 #else
8934 rtx buf_addr
8935 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8936 NULL_RTX,
8937 VOIDmode, 0));
8938 #endif
8939 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8940 rtx lab = gen_rtx (MEM, Pmode,
8941 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8942 enum machine_mode sa_mode
8943 #ifdef HAVE_save_stack_nonlocal
8944 = (HAVE_save_stack_nonlocal
8945 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8946 : Pmode);
8947 #else
8948 = Pmode;
8949 #endif
8950 rtx stack = gen_rtx (MEM, sa_mode,
8951 plus_constant (buf_addr,
8952 2 * GET_MODE_SIZE (Pmode)));
8953
8954 DECL_EXTERNAL (dummy_decl) = 1;
8955 TREE_PUBLIC (dummy_decl) = 1;
8956 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8957
8958 /* Expand the second expression just for side-effects. */
8959 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8960 const0_rtx, VOIDmode, 0);
8961
8962 assemble_external (dummy_decl);
8963
8964 /* Pick up FP, label, and SP from the block and jump. This code is
8965 from expand_goto in stmt.c; see there for detailed comments. */
8966 #if HAVE_nonlocal_goto
8967 if (HAVE_nonlocal_goto)
8968 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8969 XEXP (DECL_RTL (dummy_decl), 0)));
8970 else
8971 #endif
8972 {
8973 lab = copy_to_reg (lab);
8974 emit_move_insn (hard_frame_pointer_rtx, fp);
8975 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8976
8977 /* Put in the static chain register the address of the dummy
8978 function. */
8979 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
8980 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8981 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8982 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8983 emit_indirect_jump (lab);
8984 }
8985
8986 return const0_rtx;
8987 }
8988
8989 default: /* just do library call, if unknown builtin */
8990 error ("built-in function `%s' not currently supported",
8991 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8992 }
8993
8994 /* The switch statement above can drop through to cause the function
8995 to be called normally. */
8996
8997 return expand_call (exp, target, ignore);
8998 }
8999 \f
9000 /* Built-in functions to perform an untyped call and return. */
9001
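/* At the source level these are __builtin_apply_args, __builtin_apply
   and __builtin_return.  A sketch of a function that forwards its own
   arguments (target_fn and the 64-byte argument-size guess are
   placeholders):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (result);  */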
9002 /* For each register that may be used for calling a function, this
9003 gives a mode used to copy the register's value. VOIDmode indicates
9004 the register is not used for calling a function. If the machine
9005 has register windows, this gives only the outbound registers.
9006 INCOMING_REGNO gives the corresponding inbound register. */
9007 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9008
9009 /* For each register that may be used for returning values, this gives
9010 a mode used to copy the register's value. VOIDmode indicates the
9011 register is not used for returning values. If the machine has
9012 register windows, this gives only the outbound registers.
9013 INCOMING_REGNO gives the corresponding inbound register. */
9014 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9015
9016 /* For each register that may be used for calling a function, this
9017 gives the offset of that register into the block returned by
9018 __builtin_apply_args. 0 indicates that the register is not
9019 used for calling a function. */
9020 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9021
9022 /* Return the offset of register REGNO into the block returned by
9023 __builtin_apply_args. This is not declared static, since it is
9024 needed in objc-act.c. */
9025
9026 int
9027 apply_args_register_offset (regno)
9028 int regno;
9029 {
9030 apply_args_size ();
9031
9032 /* Arguments are always put in outgoing registers (in the argument
9033      block) when that makes sense.  */
9034 #ifdef OUTGOING_REGNO
9035 regno = OUTGOING_REGNO(regno);
9036 #endif
9037 return apply_args_reg_offset[regno];
9038 }
9039
9040 /* Return the size required for the block returned by __builtin_apply_args,
9041 and initialize apply_args_mode. */
9042
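/* As a worked example of the alignment rounding below: if SIZE is 4
   when an 8-byte register is reached, it becomes CEIL (4, 8) * 8 = 8
   before that register's slot is assigned.  */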
9043 static int
9044 apply_args_size ()
9045 {
9046 static int size = -1;
9047 int align, regno;
9048 enum machine_mode mode;
9049
9050 /* The values computed by this function never change. */
9051 if (size < 0)
9052 {
9053 /* The first value is the incoming arg-pointer. */
9054 size = GET_MODE_SIZE (Pmode);
9055
9056 /* The second value is the structure value address unless this is
9057 passed as an "invisible" first argument. */
9058 if (struct_value_rtx)
9059 size += GET_MODE_SIZE (Pmode);
9060
9061 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9062 if (FUNCTION_ARG_REGNO_P (regno))
9063 {
9064 /* Search for the proper mode for copying this register's
9065 value. I'm not sure this is right, but it works so far. */
9066 enum machine_mode best_mode = VOIDmode;
9067
9068 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9069 mode != VOIDmode;
9070 mode = GET_MODE_WIDER_MODE (mode))
9071 if (HARD_REGNO_MODE_OK (regno, mode)
9072 && HARD_REGNO_NREGS (regno, mode) == 1)
9073 best_mode = mode;
9074
9075 if (best_mode == VOIDmode)
9076 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9077 mode != VOIDmode;
9078 mode = GET_MODE_WIDER_MODE (mode))
9079 if (HARD_REGNO_MODE_OK (regno, mode)
9080 && (mov_optab->handlers[(int) mode].insn_code
9081 != CODE_FOR_nothing))
9082 best_mode = mode;
9083
9084 mode = best_mode;
9085 if (mode == VOIDmode)
9086 abort ();
9087
9088 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9089 if (size % align != 0)
9090 size = CEIL (size, align) * align;
9091 apply_args_reg_offset[regno] = size;
9092 size += GET_MODE_SIZE (mode);
9093 apply_args_mode[regno] = mode;
9094 }
9095 else
9096 {
9097 apply_args_mode[regno] = VOIDmode;
9098 apply_args_reg_offset[regno] = 0;
9099 }
9100 }
9101 return size;
9102 }
9103
9104 /* Return the size required for the block returned by __builtin_apply,
9105 and initialize apply_result_mode. */
9106
9107 static int
9108 apply_result_size ()
9109 {
9110 static int size = -1;
9111 int align, regno;
9112 enum machine_mode mode;
9113
9114 /* The values computed by this function never change. */
9115 if (size < 0)
9116 {
9117 size = 0;
9118
9119 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9120 if (FUNCTION_VALUE_REGNO_P (regno))
9121 {
9122 /* Search for the proper mode for copying this register's
9123 value. I'm not sure this is right, but it works so far. */
9124 enum machine_mode best_mode = VOIDmode;
9125
9126 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9127 mode != TImode;
9128 mode = GET_MODE_WIDER_MODE (mode))
9129 if (HARD_REGNO_MODE_OK (regno, mode))
9130 best_mode = mode;
9131
9132 if (best_mode == VOIDmode)
9133 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9134 mode != VOIDmode;
9135 mode = GET_MODE_WIDER_MODE (mode))
9136 if (HARD_REGNO_MODE_OK (regno, mode)
9137 && (mov_optab->handlers[(int) mode].insn_code
9138 != CODE_FOR_nothing))
9139 best_mode = mode;
9140
9141 mode = best_mode;
9142 if (mode == VOIDmode)
9143 abort ();
9144
9145 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9146 if (size % align != 0)
9147 size = CEIL (size, align) * align;
9148 size += GET_MODE_SIZE (mode);
9149 apply_result_mode[regno] = mode;
9150 }
9151 else
9152 apply_result_mode[regno] = VOIDmode;
9153
9154 /* Allow targets that use untyped_call and untyped_return to override
9155 the size so that machine-specific information can be stored here. */
9156 #ifdef APPLY_RESULT_SIZE
9157 size = APPLY_RESULT_SIZE;
9158 #endif
9159 }
9160 return size;
9161 }
9162
9163 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9164 /* Create a vector describing the result block RESULT. If SAVEP is true,
9165 the result block is used to save the values; otherwise it is used to
9166 restore the values. */
9167
9168 static rtx
9169 result_vector (savep, result)
9170 int savep;
9171 rtx result;
9172 {
9173 int regno, size, align, nelts;
9174 enum machine_mode mode;
9175 rtx reg, mem;
9176 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9177
9178 size = nelts = 0;
9179 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9180 if ((mode = apply_result_mode[regno]) != VOIDmode)
9181 {
9182 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9183 if (size % align != 0)
9184 size = CEIL (size, align) * align;
9185 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9186 mem = change_address (result, mode,
9187 plus_constant (XEXP (result, 0), size));
9188 savevec[nelts++] = (savep
9189 ? gen_rtx (SET, VOIDmode, mem, reg)
9190 : gen_rtx (SET, VOIDmode, reg, mem));
9191 size += GET_MODE_SIZE (mode);
9192 }
9193 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9194 }
9195 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9196
9197 /* Save the state required to perform an untyped call with the same
9198 arguments as were passed to the current function. */
9199
9200 static rtx
9201 expand_builtin_apply_args ()
9202 {
9203 rtx registers;
9204 int size, align, regno;
9205 enum machine_mode mode;
9206
9207 /* Create a block where the arg-pointer, structure value address,
9208 and argument registers can be saved. */
9209 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9210
9211 /* Walk past the arg-pointer and structure value address. */
9212 size = GET_MODE_SIZE (Pmode);
9213 if (struct_value_rtx)
9214 size += GET_MODE_SIZE (Pmode);
9215
9216 /* Save each register used in calling a function to the block. */
9217 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9218 if ((mode = apply_args_mode[regno]) != VOIDmode)
9219 {
9220 rtx tem;
9221
9222 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9223 if (size % align != 0)
9224 size = CEIL (size, align) * align;
9225
9226 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9227
9228 #ifdef STACK_REGS
9229 /* For reg-stack.c's stack register housekeeping.
9230 Compare with a similar piece of code in function.c. */
9231
9232 emit_insn (gen_rtx (USE, mode, tem));
9233 #endif
9234
9235 emit_move_insn (change_address (registers, mode,
9236 plus_constant (XEXP (registers, 0),
9237 size)),
9238 tem);
9239 size += GET_MODE_SIZE (mode);
9240 }
9241
9242 /* Save the arg pointer to the block. */
9243 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9244 copy_to_reg (virtual_incoming_args_rtx));
9245 size = GET_MODE_SIZE (Pmode);
9246
9247 /* Save the structure value address unless this is passed as an
9248 "invisible" first argument. */
9249 if (struct_value_incoming_rtx)
9250 {
9251 emit_move_insn (change_address (registers, Pmode,
9252 plus_constant (XEXP (registers, 0),
9253 size)),
9254 copy_to_reg (struct_value_incoming_rtx));
9255 size += GET_MODE_SIZE (Pmode);
9256 }
9257
9258 /* Return the address of the block. */
9259 return copy_addr_to_reg (XEXP (registers, 0));
9260 }
9261
9262 /* Perform an untyped call and save the state required to perform an
9263 untyped return of whatever value was returned by the given function. */
9264
9265 static rtx
9266 expand_builtin_apply (function, arguments, argsize)
9267 rtx function, arguments, argsize;
9268 {
9269 int size, align, regno;
9270 enum machine_mode mode;
9271 rtx incoming_args, result, reg, dest, call_insn;
9272 rtx old_stack_level = 0;
9273 rtx call_fusage = 0;
9274
9275 /* Create a block where the return registers can be saved. */
9276 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9277
9278 /* ??? The argsize value should be adjusted here. */
9279
9280 /* Fetch the arg pointer from the ARGUMENTS block. */
9281 incoming_args = gen_reg_rtx (Pmode);
9282 emit_move_insn (incoming_args,
9283 gen_rtx (MEM, Pmode, arguments));
9284 #ifndef STACK_GROWS_DOWNWARD
9285 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9286 incoming_args, 0, OPTAB_LIB_WIDEN);
9287 #endif
9288
9289 /* Perform postincrements before actually calling the function. */
9290 emit_queue ();
9291
9292 /* Push a new argument block and copy the arguments. */
9293 do_pending_stack_adjust ();
9294 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9295
9296 /* Push a block of memory onto the stack to store the memory arguments.
9297 Save the address in a register, and copy the memory arguments. ??? I
9298      haven't figured out how the calling convention macros affect this,
9299 but it's likely that the source and/or destination addresses in
9300 the block copy will need updating in machine specific ways. */
9301 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9302 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9303 gen_rtx (MEM, BLKmode, incoming_args),
9304 argsize,
9305 PARM_BOUNDARY / BITS_PER_UNIT);
9306
9307 /* Refer to the argument block. */
9308 apply_args_size ();
9309 arguments = gen_rtx (MEM, BLKmode, arguments);
9310
9311 /* Walk past the arg-pointer and structure value address. */
9312 size = GET_MODE_SIZE (Pmode);
9313 if (struct_value_rtx)
9314 size += GET_MODE_SIZE (Pmode);
9315
9316 /* Restore each of the registers previously saved. Make USE insns
9317 for each of these registers for use in making the call. */
9318 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9319 if ((mode = apply_args_mode[regno]) != VOIDmode)
9320 {
9321 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9322 if (size % align != 0)
9323 size = CEIL (size, align) * align;
9324 reg = gen_rtx (REG, mode, regno);
9325 emit_move_insn (reg,
9326 change_address (arguments, mode,
9327 plus_constant (XEXP (arguments, 0),
9328 size)));
9329
9330 use_reg (&call_fusage, reg);
9331 size += GET_MODE_SIZE (mode);
9332 }
9333
9334 /* Restore the structure value address unless this is passed as an
9335 "invisible" first argument. */
9336 size = GET_MODE_SIZE (Pmode);
9337 if (struct_value_rtx)
9338 {
9339 rtx value = gen_reg_rtx (Pmode);
9340 emit_move_insn (value,
9341 change_address (arguments, Pmode,
9342 plus_constant (XEXP (arguments, 0),
9343 size)));
9344 emit_move_insn (struct_value_rtx, value);
9345 if (GET_CODE (struct_value_rtx) == REG)
9346 use_reg (&call_fusage, struct_value_rtx);
9347 size += GET_MODE_SIZE (Pmode);
9348 }
9349
9350 /* All arguments and registers used for the call are set up by now! */
9351 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9352
9353   /* Ensure address is valid.  A SYMBOL_REF is already valid, so there is no need,
9354 and we don't want to load it into a register as an optimization,
9355 because prepare_call_address already did it if it should be done. */
9356 if (GET_CODE (function) != SYMBOL_REF)
9357 function = memory_address (FUNCTION_MODE, function);
9358
9359 /* Generate the actual call instruction and save the return value. */
9360 #ifdef HAVE_untyped_call
9361 if (HAVE_untyped_call)
9362 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9363 result, result_vector (1, result)));
9364 else
9365 #endif
9366 #ifdef HAVE_call_value
9367 if (HAVE_call_value)
9368 {
9369 rtx valreg = 0;
9370
9371 /* Locate the unique return register. It is not possible to
9372 express a call that sets more than one return register using
9373 call_value; use untyped_call for that. In fact, untyped_call
9374 only needs to save the return registers in the given block. */
9375 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9376 if ((mode = apply_result_mode[regno]) != VOIDmode)
9377 {
9378 if (valreg)
9379 abort (); /* HAVE_untyped_call required. */
9380 valreg = gen_rtx (REG, mode, regno);
9381 }
9382
9383 emit_call_insn (gen_call_value (valreg,
9384 gen_rtx (MEM, FUNCTION_MODE, function),
9385 const0_rtx, NULL_RTX, const0_rtx));
9386
9387 emit_move_insn (change_address (result, GET_MODE (valreg),
9388 XEXP (result, 0)),
9389 valreg);
9390 }
9391 else
9392 #endif
9393 abort ();
9394
9395 /* Find the CALL insn we just emitted. */
9396 for (call_insn = get_last_insn ();
9397 call_insn && GET_CODE (call_insn) != CALL_INSN;
9398 call_insn = PREV_INSN (call_insn))
9399 ;
9400
9401 if (! call_insn)
9402 abort ();
9403
9404 /* Put the register usage information on the CALL. If there is already
9405 some usage information, put ours at the end. */
9406 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9407 {
9408 rtx link;
9409
9410 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9411 link = XEXP (link, 1))
9412 ;
9413
9414 XEXP (link, 1) = call_fusage;
9415 }
9416 else
9417 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9418
9419 /* Restore the stack. */
9420 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9421
9422 /* Return the address of the result block. */
9423 return copy_addr_to_reg (XEXP (result, 0));
9424 }
9425
9426 /* Perform an untyped return. */
9427
9428 static void
9429 expand_builtin_return (result)
9430 rtx result;
9431 {
9432 int size, align, regno;
9433 enum machine_mode mode;
9434 rtx reg;
9435 rtx call_fusage = 0;
9436
9437 apply_result_size ();
9438 result = gen_rtx (MEM, BLKmode, result);
9439
9440 #ifdef HAVE_untyped_return
9441 if (HAVE_untyped_return)
9442 {
9443 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9444 emit_barrier ();
9445 return;
9446 }
9447 #endif
9448
9449 /* Restore the return value and note that each value is used. */
9450 size = 0;
9451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9452 if ((mode = apply_result_mode[regno]) != VOIDmode)
9453 {
9454 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9455 if (size % align != 0)
9456 size = CEIL (size, align) * align;
9457 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9458 emit_move_insn (reg,
9459 change_address (result, mode,
9460 plus_constant (XEXP (result, 0),
9461 size)));
9462
9463 push_to_sequence (call_fusage);
9464 emit_insn (gen_rtx (USE, VOIDmode, reg));
9465 call_fusage = get_insns ();
9466 end_sequence ();
9467 size += GET_MODE_SIZE (mode);
9468 }
9469
9470 /* Put the USE insns before the return. */
9471 emit_insns (call_fusage);
9472
9473   /* Return whatever value was restored by jumping directly to the end
9474 of the function. */
9475 expand_null_return ();
9476 }
9477 \f
9478 /* Expand code for a post- or pre- increment or decrement
9479 and return the RTX for the result.
9480 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
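/* Roughly, for `b = a++;' (POST is 1) the add is queued and the old
   value is returned, as if by `b = a, a = a + 1;'; for `b = ++a;'
   (POST is 0) the incremented value itself is returned.  */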
9481
9482 static rtx
9483 expand_increment (exp, post, ignore)
9484 register tree exp;
9485 int post, ignore;
9486 {
9487 register rtx op0, op1;
9488 register rtx temp, value;
9489 register tree incremented = TREE_OPERAND (exp, 0);
9490 optab this_optab = add_optab;
9491 int icode;
9492 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9493 int op0_is_copy = 0;
9494 int single_insn = 0;
9495 /* 1 means we can't store into OP0 directly,
9496 because it is a subreg narrower than a word,
9497 and we don't dare clobber the rest of the word. */
9498 int bad_subreg = 0;
9499
9500 if (output_bytecode)
9501 {
9502 bc_expand_expr (exp);
9503 return NULL_RTX;
9504 }
9505
9506 /* Stabilize any component ref that might need to be
9507 evaluated more than once below. */
9508 if (!post
9509 || TREE_CODE (incremented) == BIT_FIELD_REF
9510 || (TREE_CODE (incremented) == COMPONENT_REF
9511 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9512 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9513 incremented = stabilize_reference (incremented);
9514 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9515 ones into save exprs so that they don't accidentally get evaluated
9516 more than once by the code below. */
9517 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9518 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9519 incremented = save_expr (incremented);
9520
9521 /* Compute the operands as RTX.
9522 Note whether OP0 is the actual lvalue or a copy of it:
9523 I believe it is a copy iff it is a register or subreg
9524 and insns were generated in computing it. */
9525
9526 temp = get_last_insn ();
9527 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9528
9529 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9530 in place but instead must do sign- or zero-extension during assignment,
9531 so we copy it into a new register and let the code below use it as
9532 a copy.
9533
9534      Note that we can safely modify this SUBREG since it is known not to be
9535 shared (it was made by the expand_expr call above). */
9536
9537 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9538 {
9539 if (post)
9540 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9541 else
9542 bad_subreg = 1;
9543 }
9544 else if (GET_CODE (op0) == SUBREG
9545 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9546 {
9547 /* We cannot increment this SUBREG in place. If we are
9548 post-incrementing, get a copy of the old value. Otherwise,
9549 just mark that we cannot increment in place. */
9550 if (post)
9551 op0 = copy_to_reg (op0);
9552 else
9553 bad_subreg = 1;
9554 }
9555
9556 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9557 && temp != get_last_insn ());
9558 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9559
9560 /* Decide whether incrementing or decrementing. */
9561 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9562 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9563 this_optab = sub_optab;
9564
9565 /* Convert decrement by a constant into a negative increment. */
9566 if (this_optab == sub_optab
9567 && GET_CODE (op1) == CONST_INT)
9568 {
9569 op1 = GEN_INT (- INTVAL (op1));
9570 this_optab = add_optab;
9571 }
9572
9573 /* For a preincrement, see if we can do this with a single instruction. */
9574 if (!post)
9575 {
9576 icode = (int) this_optab->handlers[(int) mode].insn_code;
9577 if (icode != (int) CODE_FOR_nothing
9578 /* Make sure that OP0 is valid for operands 0 and 1
9579 of the insn we want to queue. */
9580 && (*insn_operand_predicate[icode][0]) (op0, mode)
9581 && (*insn_operand_predicate[icode][1]) (op0, mode)
9582 && (*insn_operand_predicate[icode][2]) (op1, mode))
9583 single_insn = 1;
9584 }
9585
9586 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9587 then we cannot just increment OP0. We must therefore contrive to
9588 increment the original value. Then, for postincrement, we can return
9589 OP0 since it is a copy of the old value. For preincrement, expand here
9590 unless we can do it with a single insn.
9591
9592 Likewise if storing directly into OP0 would clobber high bits
9593 we need to preserve (bad_subreg). */
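/* E.g., OP0 might be (subreg:QI (reg:SI N) 0) for a promoted char;
   storing an incremented QImode value straight into it could clobber
   the upper bits of the SImode register.  */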
9594 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9595 {
9596 /* This is the easiest way to increment the value wherever it is.
9597 Problems with multiple evaluation of INCREMENTED are prevented
9598 because either (1) it is a component_ref or preincrement,
9599 in which case it was stabilized above, or (2) it is an array_ref
9600 with constant index in an array in a register, which is
9601 safe to reevaluate. */
9602 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9603 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9604 ? MINUS_EXPR : PLUS_EXPR),
9605 TREE_TYPE (exp),
9606 incremented,
9607 TREE_OPERAND (exp, 1));
9608
9609 while (TREE_CODE (incremented) == NOP_EXPR
9610 || TREE_CODE (incremented) == CONVERT_EXPR)
9611 {
9612 newexp = convert (TREE_TYPE (incremented), newexp);
9613 incremented = TREE_OPERAND (incremented, 0);
9614 }
9615
9616       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9617 return post ? op0 : temp;
9618 }
9619
9620 if (post)
9621 {
9622 /* We have a true reference to the value in OP0.
9623 If there is an insn to add or subtract in this mode, queue it.
9624 Queueing the increment insn avoids the register shuffling
9625 that often results if we must increment now and first save
9626 the old value for subsequent use. */
9627
9628 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9629 op0 = stabilize (op0);
9630 #endif
9631
9632 icode = (int) this_optab->handlers[(int) mode].insn_code;
9633 if (icode != (int) CODE_FOR_nothing
9634 /* Make sure that OP0 is valid for operands 0 and 1
9635 of the insn we want to queue. */
9636 && (*insn_operand_predicate[icode][0]) (op0, mode)
9637 && (*insn_operand_predicate[icode][1]) (op0, mode))
9638 {
9639 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9640 op1 = force_reg (mode, op1);
9641
9642 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9643 }
9644 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9645 {
9646 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9647 rtx temp, result;
9648
9649 op0 = change_address (op0, VOIDmode, addr);
9650 temp = force_reg (GET_MODE (op0), op0);
9651 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9652 op1 = force_reg (mode, op1);
9653
9654 /* The increment queue is LIFO, thus we have to `queue'
9655 the instructions in reverse order. */
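/* That is, the store back into OP0 is queued first so that, with
   LIFO emission, the add into TEMP comes out ahead of it.  */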
9656 enqueue_insn (op0, gen_move_insn (op0, temp));
9657 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9658 return result;
9659 }
9660 }
9661
9662 /* Preincrement, or we can't increment with one simple insn. */
9663 if (post)
9664 /* Save a copy of the value before inc or dec, to return it later. */
9665 temp = value = copy_to_reg (op0);
9666 else
9667 /* Arrange to return the incremented value. */
9668 /* Copy the rtx because expand_binop will protect from the queue,
9669 and the results of that would be invalid for us to return
9670 if our caller does emit_queue before using our result. */
9671 temp = copy_rtx (value = op0);
9672
9673 /* Increment however we can. */
9674 op1 = expand_binop (mode, this_optab, value, op1, op0,
9675 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9676 /* Make sure the value is stored into OP0. */
9677 if (op1 != op0)
9678 emit_move_insn (op0, op1);
9679
9680 return temp;
9681 }
9682 \f
9683 /* Expand all function calls contained within EXP, innermost ones first.
9684 But don't look within expressions that have sequence points.
9685 For each CALL_EXPR, record the rtx for its value
9686 in the CALL_EXPR_RTL field. */
9687
9688 static void
9689 preexpand_calls (exp)
9690 tree exp;
9691 {
9692 register int nops, i;
9693 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9694
9695 if (! do_preexpand_calls)
9696 return;
9697
9698 /* Only expressions and references can contain calls. */
9699
9700 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9701 return;
9702
9703 switch (TREE_CODE (exp))
9704 {
9705 case CALL_EXPR:
9706 /* Do nothing if already expanded. */
9707 if (CALL_EXPR_RTL (exp) != 0
9708 /* Do nothing if the call returns a variable-sized object. */
9709 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9710 /* Do nothing to built-in functions. */
9711 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9712 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9713 == FUNCTION_DECL)
9714 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9715 return;
9716
9717 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9718 return;
9719
9720 case COMPOUND_EXPR:
9721 case COND_EXPR:
9722 case TRUTH_ANDIF_EXPR:
9723 case TRUTH_ORIF_EXPR:
9724 /* If we find one of these, then we can be sure
9725 the adjust will be done for it (since it makes jumps).
9726 Do it now, so that if this is inside an argument
9727 of a function, we don't get the stack adjustment
9728 after some other args have already been pushed. */
9729 do_pending_stack_adjust ();
9730 return;
9731
9732 case BLOCK:
9733 case RTL_EXPR:
9734 case WITH_CLEANUP_EXPR:
9735 case CLEANUP_POINT_EXPR:
9736 return;
9737
9738 case SAVE_EXPR:
9739 if (SAVE_EXPR_RTL (exp) != 0)
9740 return;
9741 }
9742
9743 nops = tree_code_length[(int) TREE_CODE (exp)];
9744 for (i = 0; i < nops; i++)
9745 if (TREE_OPERAND (exp, i) != 0)
9746 {
9747 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9748 if (type == 'e' || type == '<' || type == '1' || type == '2'
9749 || type == 'r')
9750 preexpand_calls (TREE_OPERAND (exp, i));
9751 }
9752 }
9753 \f
9754 /* At the start of a function, record that we have no previously-pushed
9755 arguments waiting to be popped. */
9756
9757 void
9758 init_pending_stack_adjust ()
9759 {
9760 pending_stack_adjust = 0;
9761 }
9762
9763 /* When exiting from function, if safe, clear out any pending stack adjust
9764 so the adjustment won't get done. */
9765
9766 void
9767 clear_pending_stack_adjust ()
9768 {
9769 #ifdef EXIT_IGNORE_STACK
9770 if (optimize > 0
9771 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9772 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9773 && ! flag_inline_functions)
9774 pending_stack_adjust = 0;
9775 #endif
9776 }
9777
9778 /* Pop any previously-pushed arguments that have not been popped yet. */
9779
9780 void
9781 do_pending_stack_adjust ()
9782 {
9783 if (inhibit_defer_pop == 0)
9784 {
9785 if (pending_stack_adjust != 0)
9786 adjust_stack (GEN_INT (pending_stack_adjust));
9787 pending_stack_adjust = 0;
9788 }
9789 }
9790 \f
9791 /* Expand conditional expressions. */
9792
9793 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9794 LABEL is an rtx of code CODE_LABEL, in this function and all the
9795 functions here. */
9796
9797 void
9798 jumpifnot (exp, label)
9799 tree exp;
9800 rtx label;
9801 {
9802 do_jump (exp, label, NULL_RTX);
9803 }
9804
9805 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9806
9807 void
9808 jumpif (exp, label)
9809 tree exp;
9810 rtx label;
9811 {
9812 do_jump (exp, NULL_RTX, label);
9813 }
9814
9815 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9816 the result is zero, or IF_TRUE_LABEL if the result is one.
9817 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9818 meaning fall through in that case.
9819
9820 do_jump always does any pending stack adjust except when it does not
9821 actually perform a jump. An example where there is no jump
9822 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9823
9824 This function is responsible for optimizing cases such as
9825 &&, || and comparison operators in EXP. */
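/* For example, `if (a && b) stmt;' compiles roughly to

       do_jump (a, L_false, 0);     -- fall through when A is nonzero
       do_jump (b, L_false, 0);
       ... stmt ...
     L_false:

   where L_false is a label made up for this sketch.  */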
9826
9827 void
9828 do_jump (exp, if_false_label, if_true_label)
9829 tree exp;
9830 rtx if_false_label, if_true_label;
9831 {
9832 register enum tree_code code = TREE_CODE (exp);
9833 /* Some cases need to create a label to jump to
9834 in order to properly fall through.
9835 These cases set DROP_THROUGH_LABEL nonzero. */
9836 rtx drop_through_label = 0;
9837 rtx temp;
9838 rtx comparison = 0;
9839 int i;
9840 tree type;
9841 enum machine_mode mode;
9842
9843 emit_queue ();
9844
9845 switch (code)
9846 {
9847 case ERROR_MARK:
9848 break;
9849
9850 case INTEGER_CST:
9851 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9852 if (temp)
9853 emit_jump (temp);
9854 break;
9855
9856 #if 0
9857 /* This is not true with #pragma weak */
9858 case ADDR_EXPR:
9859 /* The address of something can never be zero. */
9860 if (if_true_label)
9861 emit_jump (if_true_label);
9862 break;
9863 #endif
9864
9865 case NOP_EXPR:
9866 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9867 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9868 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9869 goto normal;
9870 case CONVERT_EXPR:
9871 /* If we are narrowing the operand, we have to do the compare in the
9872 narrower mode. */
9873 if ((TYPE_PRECISION (TREE_TYPE (exp))
9874 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9875 goto normal;
9876 case NON_LVALUE_EXPR:
9877 case REFERENCE_EXPR:
9878 case ABS_EXPR:
9879 case NEGATE_EXPR:
9880 case LROTATE_EXPR:
9881 case RROTATE_EXPR:
9882 /* These cannot change zero->non-zero or vice versa. */
9883 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9884 break;
9885
9886 #if 0
9887 /* This is never less insns than evaluating the PLUS_EXPR followed by
9888 a test and can be longer if the test is eliminated. */
9889 case PLUS_EXPR:
9890 /* Reduce to minus. */
9891 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9892 TREE_OPERAND (exp, 0),
9893 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9894 TREE_OPERAND (exp, 1))));
9895 /* Process as MINUS. */
9896 #endif
9897
9898 case MINUS_EXPR:
9899 /* Non-zero iff operands of minus differ. */
9900 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9901 TREE_OPERAND (exp, 0),
9902 TREE_OPERAND (exp, 1)),
9903 NE, NE);
9904 break;
9905
9906 case BIT_AND_EXPR:
9907 /* If we are AND'ing with a small constant, do this comparison in the
9908 smallest type that fits. If the machine doesn't have comparisons
9909 that small, it will be converted back to the wider comparison.
9910 This helps if we are testing the sign bit of a narrower object.
9911 combine can't do this for us because it can't know whether a
9912 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
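/* E.g., for `if (x & 0x80)' with 32-bit X: I is floor_log2 (0x80) = 7,
   an 8-bit integer mode suffices, so the test is done as a QImode
   comparison when the machine provides one.  */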
9913
9914 if (! SLOW_BYTE_ACCESS
9915 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9916 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9917 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9918 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9919 && (type = type_for_mode (mode, 1)) != 0
9920 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9921 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9922 != CODE_FOR_nothing))
9923 {
9924 do_jump (convert (type, exp), if_false_label, if_true_label);
9925 break;
9926 }
9927 goto normal;
9928
9929 case TRUTH_NOT_EXPR:
9930 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9931 break;
9932
9933 case TRUTH_ANDIF_EXPR:
9934 if (if_false_label == 0)
9935 if_false_label = drop_through_label = gen_label_rtx ();
9936 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9937 start_cleanup_deferal ();
9938 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9939 end_cleanup_deferal ();
9940 break;
9941
9942 case TRUTH_ORIF_EXPR:
9943 if (if_true_label == 0)
9944 if_true_label = drop_through_label = gen_label_rtx ();
9945 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9946 start_cleanup_deferal ();
9947 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9948 end_cleanup_deferal ();
9949 break;
9950
9951 case COMPOUND_EXPR:
9952 push_temp_slots ();
9953 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9954 preserve_temp_slots (NULL_RTX);
9955 free_temp_slots ();
9956 pop_temp_slots ();
9957 emit_queue ();
9958 do_pending_stack_adjust ();
9959 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9960 break;
9961
9962 case COMPONENT_REF:
9963 case BIT_FIELD_REF:
9964 case ARRAY_REF:
9965 {
9966 int bitsize, bitpos, unsignedp;
9967 enum machine_mode mode;
9968 tree type;
9969 tree offset;
9970 int volatilep = 0;
9971 int alignment;
9972
9973 /* Get description of this reference. We don't actually care
9974 about the underlying object here. */
9975 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9976 &mode, &unsignedp, &volatilep,
9977 &alignment);
9978
9979 type = type_for_size (bitsize, unsignedp);
9980 if (! SLOW_BYTE_ACCESS
9981 && type != 0 && bitsize >= 0
9982 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9983 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9984 != CODE_FOR_nothing))
9985 {
9986 do_jump (convert (type, exp), if_false_label, if_true_label);
9987 break;
9988 }
9989 goto normal;
9990 }
9991
9992 case COND_EXPR:
9993 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9994 if (integer_onep (TREE_OPERAND (exp, 1))
9995 && integer_zerop (TREE_OPERAND (exp, 2)))
9996 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9997
9998 else if (integer_zerop (TREE_OPERAND (exp, 1))
9999 && integer_onep (TREE_OPERAND (exp, 2)))
10000 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10001
10002 else
10003 {
10004 register rtx label1 = gen_label_rtx ();
10005 drop_through_label = gen_label_rtx ();
10006
10007 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10008
10009 start_cleanup_deferal ();
10010 /* Now the THEN-expression. */
10011 do_jump (TREE_OPERAND (exp, 1),
10012 if_false_label ? if_false_label : drop_through_label,
10013 if_true_label ? if_true_label : drop_through_label);
10014 /* In case the do_jump just above never jumps. */
10015 do_pending_stack_adjust ();
10016 emit_label (label1);
10017
10018 /* Now the ELSE-expression. */
10019 do_jump (TREE_OPERAND (exp, 2),
10020 if_false_label ? if_false_label : drop_through_label,
10021 if_true_label ? if_true_label : drop_through_label);
10022 end_cleanup_deferal ();
10023 }
10024 break;
10025
10026 case EQ_EXPR:
10027 {
10028 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10029
10030 if (integer_zerop (TREE_OPERAND (exp, 1)))
10031 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10032 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10033 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10034 do_jump
10035 (fold
10036 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10037 fold (build (EQ_EXPR, TREE_TYPE (exp),
10038 fold (build1 (REALPART_EXPR,
10039 TREE_TYPE (inner_type),
10040 TREE_OPERAND (exp, 0))),
10041 fold (build1 (REALPART_EXPR,
10042 TREE_TYPE (inner_type),
10043 TREE_OPERAND (exp, 1))))),
10044 fold (build (EQ_EXPR, TREE_TYPE (exp),
10045 fold (build1 (IMAGPART_EXPR,
10046 TREE_TYPE (inner_type),
10047 TREE_OPERAND (exp, 0))),
10048 fold (build1 (IMAGPART_EXPR,
10049 TREE_TYPE (inner_type),
10050 TREE_OPERAND (exp, 1))))))),
10051 if_false_label, if_true_label);
10052 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10053 && !can_compare_p (TYPE_MODE (inner_type)))
10054 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10055 else
10056 comparison = compare (exp, EQ, EQ);
10057 break;
10058 }
10059
10060 case NE_EXPR:
10061 {
10062 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10063
10064 if (integer_zerop (TREE_OPERAND (exp, 1)))
10065 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10066 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10067 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10068 do_jump
10069 (fold
10070 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10071 fold (build (NE_EXPR, TREE_TYPE (exp),
10072 fold (build1 (REALPART_EXPR,
10073 TREE_TYPE (inner_type),
10074 TREE_OPERAND (exp, 0))),
10075 fold (build1 (REALPART_EXPR,
10076 TREE_TYPE (inner_type),
10077 TREE_OPERAND (exp, 1))))),
10078 fold (build (NE_EXPR, TREE_TYPE (exp),
10079 fold (build1 (IMAGPART_EXPR,
10080 TREE_TYPE (inner_type),
10081 TREE_OPERAND (exp, 0))),
10082 fold (build1 (IMAGPART_EXPR,
10083 TREE_TYPE (inner_type),
10084 TREE_OPERAND (exp, 1))))))),
10085 if_false_label, if_true_label);
10086 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10087 && !can_compare_p (TYPE_MODE (inner_type)))
10088 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10089 else
10090 comparison = compare (exp, NE, NE);
10091 break;
10092 }
10093
10094 case LT_EXPR:
10095 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10096 == MODE_INT)
10097 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10098 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10099 else
10100 comparison = compare (exp, LT, LTU);
10101 break;
10102
10103 case LE_EXPR:
10104 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10105 == MODE_INT)
10106 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10107 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10108 else
10109 comparison = compare (exp, LE, LEU);
10110 break;
10111
10112 case GT_EXPR:
10113 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10114 == MODE_INT)
10115 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10116 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10117 else
10118 comparison = compare (exp, GT, GTU);
10119 break;
10120
10121 case GE_EXPR:
10122 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10123 == MODE_INT)
10124 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10125 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10126 else
10127 comparison = compare (exp, GE, GEU);
10128 break;
10129
10130 default:
10131 normal:
10132 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10133 #if 0
10134 /* This is not needed any more and causes poor code since it causes
10135 comparisons and tests from non-SI objects to have different code
10136 sequences. */
10137 /* Copy to register to avoid generating bad insns by cse
10138 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10139 if (!cse_not_expected && GET_CODE (temp) == MEM)
10140 temp = copy_to_reg (temp);
10141 #endif
10142 do_pending_stack_adjust ();
10143 if (GET_CODE (temp) == CONST_INT)
10144 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10145 else if (GET_CODE (temp) == LABEL_REF)
10146 comparison = const_true_rtx;
10147 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10148 && !can_compare_p (GET_MODE (temp)))
10149 /* Note swapping the labels gives us not-equal. */
10150 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10151 else if (GET_MODE (temp) != VOIDmode)
10152 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10153 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10154 GET_MODE (temp), NULL_RTX, 0);
10155 else
10156 abort ();
10157 }
10158
10159 /* Do any postincrements in the expression that was tested. */
10160 emit_queue ();
10161
10162 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10163 straight into a conditional jump instruction as the jump condition.
10164 Otherwise, all the work has been done already. */
10165
10166 if (comparison == const_true_rtx)
10167 {
10168 if (if_true_label)
10169 emit_jump (if_true_label);
10170 }
10171 else if (comparison == const0_rtx)
10172 {
10173 if (if_false_label)
10174 emit_jump (if_false_label);
10175 }
10176 else if (comparison)
10177 do_jump_for_compare (comparison, if_false_label, if_true_label);
10178
10179 if (drop_through_label)
10180 {
10181 /* If do_jump produces code that might be jumped around,
10182 do any stack adjusts from that code, before the place
10183 where control merges in. */
10184 do_pending_stack_adjust ();
10185 emit_label (drop_through_label);
10186 }
10187 }
10188 \f
10189 /* Given a comparison expression EXP for values too wide to be compared
10190 with one insn, test the comparison and jump to the appropriate label.
10191 The code of EXP is ignored; we always test GT if SWAP is 0,
10192 and LT if SWAP is 1. */
10193
10194 static void
10195 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10196 tree exp;
10197 int swap;
10198 rtx if_false_label, if_true_label;
10199 {
10200 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10201 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10202 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10203 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10204 rtx drop_through_label = 0;
10205 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10206 int i;
10207
10208 if (! if_true_label || ! if_false_label)
10209 drop_through_label = gen_label_rtx ();
10210 if (! if_true_label)
10211 if_true_label = drop_through_label;
10212 if (! if_false_label)
10213 if_false_label = drop_through_label;
10214
10215 /* Compare a word at a time, high order first. */
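/* E.g., a signed 64-bit `>' on a 32-bit target expands roughly as:
     jump true   if HI(op0) >  HI(op1)    -- signed compare
     jump false  if HI(op0) != HI(op1)
     jump true   if LO(op0) >  LO(op1)    -- unsigned compare
     jump false  otherwise.  */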
10216 for (i = 0; i < nwords; i++)
10217 {
10218 rtx comp;
10219 rtx op0_word, op1_word;
10220
10221 if (WORDS_BIG_ENDIAN)
10222 {
10223 op0_word = operand_subword_force (op0, i, mode);
10224 op1_word = operand_subword_force (op1, i, mode);
10225 }
10226 else
10227 {
10228 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10229 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10230 }
10231
10232 /* All but high-order word must be compared as unsigned. */
10233 comp = compare_from_rtx (op0_word, op1_word,
10234 (unsignedp || i > 0) ? GTU : GT,
10235 unsignedp, word_mode, NULL_RTX, 0);
10236 if (comp == const_true_rtx)
10237 emit_jump (if_true_label);
10238 else if (comp != const0_rtx)
10239 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10240
10241 /* Consider lower words only if these are equal. */
10242 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10243 NULL_RTX, 0);
10244 if (comp == const_true_rtx)
10245 emit_jump (if_false_label);
10246 else if (comp != const0_rtx)
10247 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10248 }
10249
10250 if (if_false_label)
10251 emit_jump (if_false_label);
10252 if (drop_through_label)
10253 emit_label (drop_through_label);
10254 }
10255
10256 /* Compare OP0 with OP1, word at a time, in mode MODE.
10257 UNSIGNEDP says to do unsigned comparison.
10258 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10259
10260 void
10261 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10262 enum machine_mode mode;
10263 int unsignedp;
10264 rtx op0, op1;
10265 rtx if_false_label, if_true_label;
10266 {
10267 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10268 rtx drop_through_label = 0;
10269 int i;
10270
10271 if (! if_true_label || ! if_false_label)
10272 drop_through_label = gen_label_rtx ();
10273 if (! if_true_label)
10274 if_true_label = drop_through_label;
10275 if (! if_false_label)
10276 if_false_label = drop_through_label;
10277
10278 /* Compare a word at a time, high order first. */
10279 for (i = 0; i < nwords; i++)
10280 {
10281 rtx comp;
10282 rtx op0_word, op1_word;
10283
10284 if (WORDS_BIG_ENDIAN)
10285 {
10286 op0_word = operand_subword_force (op0, i, mode);
10287 op1_word = operand_subword_force (op1, i, mode);
10288 }
10289 else
10290 {
10291 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10292 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10293 }
10294
10295 /* All but high-order word must be compared as unsigned. */
10296 comp = compare_from_rtx (op0_word, op1_word,
10297 (unsignedp || i > 0) ? GTU : GT,
10298 unsignedp, word_mode, NULL_RTX, 0);
10299 if (comp == const_true_rtx)
10300 emit_jump (if_true_label);
10301 else if (comp != const0_rtx)
10302 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10303
10304 /* Consider lower words only if these are equal. */
10305 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10306 NULL_RTX, 0);
10307 if (comp == const_true_rtx)
10308 emit_jump (if_false_label);
10309 else if (comp != const0_rtx)
10310 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10311 }
10312
10313 if (if_false_label)
10314 emit_jump (if_false_label);
10315 if (drop_through_label)
10316 emit_label (drop_through_label);
10317 }
10318
10319 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10320 with one insn, test the comparison and jump to the appropriate label. */
10321
10322 static void
10323 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10324 tree exp;
10325 rtx if_false_label, if_true_label;
10326 {
10327 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10328 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10329 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10330 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10331 int i;
10332 rtx drop_through_label = 0;
10333
10334 if (! if_false_label)
10335 drop_through_label = if_false_label = gen_label_rtx ();
10336
10337 for (i = 0; i < nwords; i++)
10338 {
10339 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10340 operand_subword_force (op1, i, mode),
10341 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10342 word_mode, NULL_RTX, 0);
10343 if (comp == const_true_rtx)
10344 emit_jump (if_false_label);
10345 else if (comp != const0_rtx)
10346 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10347 }
10348
10349 if (if_true_label)
10350 emit_jump (if_true_label);
10351 if (drop_through_label)
10352 emit_label (drop_through_label);
10353 }
10354 \f
10355 /* Jump according to whether OP0 is 0.
10356 We assume that OP0 has an integer mode that is too wide
10357 for the available compare insns. */
10358
10359 static void
10360 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10361 rtx op0;
10362 rtx if_false_label, if_true_label;
10363 {
10364 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10365 int i;
10366 rtx drop_through_label = 0;
10367
10368 if (! if_false_label)
10369 drop_through_label = if_false_label = gen_label_rtx ();
10370
10371 for (i = 0; i < nwords; i++)
10372 {
10373 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10374 GET_MODE (op0)),
10375 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10376 if (comp == const_true_rtx)
10377 emit_jump (if_false_label);
10378 else if (comp != const0_rtx)
10379 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10380 }
10381
10382 if (if_true_label)
10383 emit_jump (if_true_label);
10384 if (drop_through_label)
10385 emit_label (drop_through_label);
10386 }
10387
10388 /* Given a comparison expression in rtl form, output conditional branches to
10389 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10390
10391 static void
10392 do_jump_for_compare (comparison, if_false_label, if_true_label)
10393 rtx comparison, if_false_label, if_true_label;
10394 {
10395 if (if_true_label)
10396 {
10397 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10398 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10399 else
10400 abort ();
10401
10402 if (if_false_label)
10403 emit_jump (if_false_label);
10404 }
10405 else if (if_false_label)
10406 {
10407 rtx insn;
10408 rtx prev = get_last_insn ();
10409 rtx branch = 0;
10410
10411 /* Output the branch with the opposite condition. Then try to invert
10412 what is generated. If more than one insn is a branch, or if the
10413 branch is not the last insn written, abort. If we can't invert
10414      the branch, make a true label, redirect this jump to that,
10415 emit a jump to the false label and define the true label. */
10416
10417 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10418 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10419 else
10420 abort ();
10421
10422 /* Here we get the first insn that was just emitted. It used to be the
10423 case that, on some machines, emitting the branch would discard
10424 the previous compare insn and emit a replacement. This isn't
10425 done anymore, but abort if we see that PREV is deleted. */
10426
10427 if (prev == 0)
10428 insn = get_insns ();
10429 else if (INSN_DELETED_P (prev))
10430 abort ();
10431 else
10432 insn = NEXT_INSN (prev);
10433
10434 for (; insn; insn = NEXT_INSN (insn))
10435 if (GET_CODE (insn) == JUMP_INSN)
10436 {
10437 if (branch)
10438 abort ();
10439 branch = insn;
10440 }
10441
10442 if (branch != get_last_insn ())
10443 abort ();
10444
10445 JUMP_LABEL (branch) = if_false_label;
10446 if (! invert_jump (branch, if_false_label))
10447 {
10448 if_true_label = gen_label_rtx ();
10449 redirect_jump (branch, if_true_label);
10450 emit_jump (if_false_label);
10451 emit_label (if_true_label);
10452 }
10453 }
10454 }
10455 \f
10456 /* Generate code for a comparison expression EXP
10457 (including code to compute the values to be compared)
10458 and set (CC0) according to the result.
10459 SIGNED_CODE should be the rtx operation for this comparison for
10460 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10461
10462 We force a stack adjustment unless there are currently
10463 things pushed on the stack that aren't yet used. */
10464
10465 static rtx
10466 compare (exp, signed_code, unsigned_code)
10467 register tree exp;
10468 enum rtx_code signed_code, unsigned_code;
10469 {
10470 register rtx op0
10471 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10472 register rtx op1
10473 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10474 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10475 register enum machine_mode mode = TYPE_MODE (type);
10476 int unsignedp = TREE_UNSIGNED (type);
10477 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10478
10479 #ifdef HAVE_canonicalize_funcptr_for_compare
10480 /* If function pointers need to be "canonicalized" before they can
10481 be reliably compared, then canonicalize them. */
10482 if (HAVE_canonicalize_funcptr_for_compare
10483 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10484 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10485 == FUNCTION_TYPE))
10486 {
10487 rtx new_op0 = gen_reg_rtx (mode);
10488
10489 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10490 op0 = new_op0;
10491 }
10492
10493 if (HAVE_canonicalize_funcptr_for_compare
10494 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10495 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10496 == FUNCTION_TYPE))
10497 {
10498 rtx new_op1 = gen_reg_rtx (mode);
10499
10500 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10501 op1 = new_op1;
10502 }
10503 #endif
10504
10505 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10506 ((mode == BLKmode)
10507 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10508 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10509 }
10510
10511 /* Like compare but expects the values to compare as two rtx's.
10512 The decision as to signed or unsigned comparison must be made by the caller.
10513
10514 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10515 compared.
10516
10517 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10518 size of MODE should be used. */
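/* A typical use, as a sketch: given integer pseudos OP0 and OP1,
compare_from_rtx (op0, op1, EQ, 0, SImode, NULL_RTX, 0) emits the
compare insn and returns (eq (cc0) (const_int 0)) for use in a
conditional-jump pattern, unless the comparison folds to a constant. */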
10519
10520 rtx
10521 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10522 register rtx op0, op1;
10523 enum rtx_code code;
10524 int unsignedp;
10525 enum machine_mode mode;
10526 rtx size;
10527 int align;
10528 {
10529 rtx tem;
10530
10531 /* If one operand is constant, make it the second one. Only do this
10532 if the other operand is not constant as well. */
10533
10534 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10535 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10536 {
10537 tem = op0;
10538 op0 = op1;
10539 op1 = tem;
10540 code = swap_condition (code);
10541 }
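/* E.g. (GT (const_int 4) (reg X)) becomes (LT (reg X) (const_int 4));
swapping the operands swaps the condition code but preserves the
truth value of the comparison. */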
10542
10543 if (flag_force_mem)
10544 {
10545 op0 = force_not_mem (op0);
10546 op1 = force_not_mem (op1);
10547 }
10548
10549 do_pending_stack_adjust ();
10550
10551 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10552 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10553 return tem;
10554
10555 #if 0
10556 /* There's no need to do this now that combine.c can eliminate lots of
10557 sign extensions. This can be less efficient in certain cases on other
10558 machines. */
10559
10560 /* If this is a signed equality comparison, we can do it as an
10561 unsigned comparison since zero-extension is cheaper than sign
10562 extension and comparisons with zero are done as unsigned. This is
10563 the case even on machines that can do fast sign extension, since
10564 zero-extension is easier to combine with other operations than
10565 sign-extension is. If we are comparing against a constant, we must
10566 convert it to what it would look like unsigned. */
10567 if ((code == EQ || code == NE) && ! unsignedp
10568 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10569 {
10570 if (GET_CODE (op1) == CONST_INT
10571 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10572 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10573 unsignedp = 1;
10574 }
10575 #endif
10576
10577 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10578
10579 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10580 }
10581 \f
10582 /* Generate code to calculate EXP using a store-flag instruction
10583 and return an rtx for the result. EXP is either a comparison
10584 or a TRUTH_NOT_EXPR whose operand is a comparison.
10585
10586 If TARGET is nonzero, store the result there if convenient.
10587
10588 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10589 cheap.
10590
10591 Return zero if there is no suitable set-flag instruction
10592 available on this machine.
10593
10594 Once expand_expr has been called on the arguments of the comparison,
10595 we are committed to doing the store flag, since it is not safe to
10596 re-evaluate the expression. We emit the store-flag insn by calling
10597 emit_store_flag, but only expand the arguments if we have a reason
10598 to believe that emit_store_flag will be successful. If we think that
10599 it will, but it isn't, we have to simulate the store-flag with a
10600 set/jump/set sequence. */
10601
10602 static rtx
10603 do_store_flag (exp, target, mode, only_cheap)
10604 tree exp;
10605 rtx target;
10606 enum machine_mode mode;
10607 int only_cheap;
10608 {
10609 enum rtx_code code;
10610 tree arg0, arg1, type;
10611 tree tem;
10612 enum machine_mode operand_mode;
10613 int invert = 0;
10614 int unsignedp;
10615 rtx op0, op1;
10616 enum insn_code icode;
10617 rtx subtarget = target;
10618 rtx result, label, pattern, jump_pat;
10619
10620 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10621 result at the end. We can't simply invert the test since it would
10622 have already been inverted if it were valid. This case occurs for
10623 some floating-point comparisons. */
10624
10625 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10626 invert = 1, exp = TREE_OPERAND (exp, 0);
10627
10628 arg0 = TREE_OPERAND (exp, 0);
10629 arg1 = TREE_OPERAND (exp, 1);
10630 type = TREE_TYPE (arg0);
10631 operand_mode = TYPE_MODE (type);
10632 unsignedp = TREE_UNSIGNED (type);
10633
10634 /* We won't bother with BLKmode store-flag operations because it would mean
10635 passing a lot of information to emit_store_flag. */
10636 if (operand_mode == BLKmode)
10637 return 0;
10638
10639 /* We won't bother with store-flag operations involving function pointers
10640 when function pointers must be canonicalized before comparisons. */
10641 #ifdef HAVE_canonicalize_funcptr_for_compare
10642 if (HAVE_canonicalize_funcptr_for_compare
10643 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10644 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10645 == FUNCTION_TYPE))
10646 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10647 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10648 == FUNCTION_TYPE))))
10649 return 0;
10650 #endif
10651
10652 STRIP_NOPS (arg0);
10653 STRIP_NOPS (arg1);
10654
10655 /* Get the rtx comparison code to use. We know that EXP is a comparison
10656 operation of some type. Some comparisons against 1 and -1 can be
10657 converted to comparisons with zero. Do so here so that the tests
10658 below will be aware that we have a comparison with zero. These
10659 tests will not catch constants in the first operand, but constants
10660 are rarely passed as the first operand. */
10661
10662 switch (TREE_CODE (exp))
10663 {
10664 case EQ_EXPR:
10665 code = EQ;
10666 break;
10667 case NE_EXPR:
10668 code = NE;
10669 break;
10670 case LT_EXPR:
10671 if (integer_onep (arg1))
10672 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10673 else
10674 code = unsignedp ? LTU : LT;
10675 break;
10676 case LE_EXPR:
10677 if (! unsignedp && integer_all_onesp (arg1))
10678 arg1 = integer_zero_node, code = LT;
10679 else
10680 code = unsignedp ? LEU : LE;
10681 break;
10682 case GT_EXPR:
10683 if (! unsignedp && integer_all_onesp (arg1))
10684 arg1 = integer_zero_node, code = GE;
10685 else
10686 code = unsignedp ? GTU : GT;
10687 break;
10688 case GE_EXPR:
10689 if (integer_onep (arg1))
10690 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10691 else
10692 code = unsignedp ? GEU : GE;
10693 break;
10694 default:
10695 abort ();
10696 }
10697
10698 /* Put a constant second. */
10699 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10700 {
10701 tem = arg0; arg0 = arg1; arg1 = tem;
10702 code = swap_condition (code);
10703 }
10704
10705 /* If this is an equality or inequality test of a single bit, we can
10706 do this by shifting the bit being tested to the low-order bit and
10707 masking the result with the constant 1. If the condition was EQ,
10708 we xor it with 1. This does not require an scc insn and is faster
10709 than an scc insn even if we have it. */
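/* For instance, "(x & 8) != 0" becomes "(x >> 3) & 1", and
"(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1". */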
10710
10711 if ((code == NE || code == EQ)
10712 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10713 && integer_pow2p (TREE_OPERAND (arg0, 1))
10714 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10715 {
10716 tree inner = TREE_OPERAND (arg0, 0);
10717 HOST_WIDE_INT tem;
10718 int bitnum;
10719 int ops_unsignedp;
10720
10721 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10722 NULL_RTX, VOIDmode, 0));
10723 /* In this case, immed_double_const will sign extend the value to make
10724 it look the same on the host and target. We must remove the
10725 sign-extension before calling exact_log2, since exact_log2 will
10726 fail for negative values. */
10727 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10728 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10729 /* We don't use the obvious constant shift to generate the mask,
10730 because that generates compiler warnings when BITS_PER_WORD is
10731 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10732 code is unreachable in that case. */
10733 tem = tem & GET_MODE_MASK (word_mode);
10734 bitnum = exact_log2 (tem);
10735
10736 /* If INNER is a right shift of a constant and it plus BITNUM does
10737 not overflow, adjust BITNUM and INNER. */
10738
10739 if (TREE_CODE (inner) == RSHIFT_EXPR
10740 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10741 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10742 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10743 < TYPE_PRECISION (type)))
10744 {
10745 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10746 inner = TREE_OPERAND (inner, 0);
10747 }
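/* E.g. for "((x >> 2) & 4) != 0", BITNUM is 2 for the mask 4; adding
the shift count 2 lets us test bit 4 of X directly. */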
10748
10749 /* If we are going to be able to omit the AND below, we must do our
10750 operations as unsigned. If we must use the AND, we have a choice.
10751 Normally unsigned is faster, but for some machines signed is. */
10752 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10753 #ifdef LOAD_EXTEND_OP
10754 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10755 #else
10756 : 1
10757 #endif
10758 );
10759
10760 if (subtarget == 0 || GET_CODE (subtarget) != REG
10761 || GET_MODE (subtarget) != operand_mode
10762 || ! safe_from_p (subtarget, inner))
10763 subtarget = 0;
10764
10765 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10766
10767 if (bitnum != 0)
10768 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10769 size_int (bitnum), subtarget, ops_unsignedp);
10770
10771 if (GET_MODE (op0) != mode)
10772 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10773
10774 if ((code == EQ && ! invert) || (code == NE && invert))
10775 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10776 ops_unsignedp, OPTAB_LIB_WIDEN);
10777
10778 /* Put the AND last so it can combine with more things. */
10779 if (bitnum != TYPE_PRECISION (type) - 1)
10780 op0 = expand_and (op0, const1_rtx, subtarget);
10781
10782 return op0;
10783 }
10784
10785 /* Now see if we are likely to be able to do this. Return if not. */
10786 if (! can_compare_p (operand_mode))
10787 return 0;
10788 icode = setcc_gen_code[(int) code];
10789 if (icode == CODE_FOR_nothing
10790 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10791 {
10792 /* We can only do this if it is one of the special cases that
10793 can be handled without an scc insn. */
10794 if ((code == LT && integer_zerop (arg1))
10795 || (! only_cheap && code == GE && integer_zerop (arg1)))
10796 ;
10797 else if (BRANCH_COST >= 0
10798 && ! only_cheap && (code == NE || code == EQ)
10799 && TREE_CODE (type) != REAL_TYPE
10800 && ((abs_optab->handlers[(int) operand_mode].insn_code
10801 != CODE_FOR_nothing)
10802 || (ffs_optab->handlers[(int) operand_mode].insn_code
10803 != CODE_FOR_nothing)))
10804 ;
10805 else
10806 return 0;
10807 }
10808
10809 preexpand_calls (exp);
10810 if (subtarget == 0 || GET_CODE (subtarget) != REG
10811 || GET_MODE (subtarget) != operand_mode
10812 || ! safe_from_p (subtarget, arg1))
10813 subtarget = 0;
10814
10815 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10816 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10817
10818 if (target == 0)
10819 target = gen_reg_rtx (mode);
10820
10821 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10822 because, if the emit_store_flag does anything it will succeed and
10823 OP0 and OP1 will not be used subsequently. */
10824
10825 result = emit_store_flag (target, code,
10826 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10827 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10828 operand_mode, unsignedp, 1);
10829
10830 if (result)
10831 {
10832 if (invert)
10833 result = expand_binop (mode, xor_optab, result, const1_rtx,
10834 result, 0, OPTAB_LIB_WIDEN);
10835 return result;
10836 }
10837
10838 /* If this failed, we have to do this with set/compare/jump/set code. */
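/* In outline, for the non-inverted case the code below emits:
target = 1; if (OP0 <code> OP1) goto L; target = 0; L:
leaving the flag value in TARGET. */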
10839 if (GET_CODE (target) != REG
10840 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10841 target = gen_reg_rtx (GET_MODE (target));
10842
10843 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10844 result = compare_from_rtx (op0, op1, code, unsignedp,
10845 operand_mode, NULL_RTX, 0);
10846 if (GET_CODE (result) == CONST_INT)
10847 return (((result == const0_rtx && ! invert)
10848 || (result != const0_rtx && invert))
10849 ? const0_rtx : const1_rtx);
10850
10851 label = gen_label_rtx ();
10852 if (bcc_gen_fctn[(int) code] == 0)
10853 abort ();
10854
10855 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10856 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10857 emit_label (label);
10858
10859 return target;
10860 }
10861 \f
10862 /* Generate a tablejump instruction (used for switch statements). */
10863
10864 #ifdef HAVE_tablejump
10865
10866 /* INDEX is the value being switched on, with the lowest value
10867 in the table already subtracted.
10868 MODE is its expected mode (needed if INDEX is constant).
10869 RANGE is the length of the jump table.
10870 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10871
10872 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10873 index value is out of range. */
10874
10875 void
10876 do_tablejump (index, mode, range, table_label, default_label)
10877 rtx index, range, table_label, default_label;
10878 enum machine_mode mode;
10879 {
10880 register rtx temp, vector;
10881
10882 /* Do an unsigned comparison (in the proper mode) between the index
10883 expression and the value which represents the length of the range.
10884 Since we just finished subtracting the lower bound of the range
10885 from the index expression, this comparison allows us to simultaneously
10886 check that the original index expression value is both greater than
10887 or equal to the minimum value of the range and less than or equal to
10888 the maximum value of the range. */
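/* E.g. for case values 5 .. 10, RANGE is 5 and INDEX has already had
5 subtracted: an original value of 7 gives 2, which is in range, while
3 gives (unsigned) -2, which exceeds 5 and takes the default. */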
10889
10890 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10891 emit_jump_insn (gen_bgtu (default_label));
10892
10893 /* If index is in range, it must fit in Pmode.
10894 Convert to Pmode so we can index with it. */
10895 if (mode != Pmode)
10896 index = convert_to_mode (Pmode, index, 1);
10897
10898 /* Don't let a MEM slip through, because then INDEX that comes
10899 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10900 and break_out_memory_refs will go to work on it and mess it up. */
10901 #ifdef PIC_CASE_VECTOR_ADDRESS
10902 if (flag_pic && GET_CODE (index) != REG)
10903 index = copy_to_mode_reg (Pmode, index);
10904 #endif
10905
10906 /* If flag_force_addr were to affect this address
10907 it could interfere with the tricky assumptions made
10908 about addresses that contain label-refs,
10909 which may be valid only very near the tablejump itself. */
10910 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10911 GET_MODE_SIZE, because this indicates how large insns are. The other
10912 uses should all be Pmode, because they are addresses. This code
10913 could fail if addresses and insns are not the same size. */
10914 index = gen_rtx (PLUS, Pmode,
10915 gen_rtx (MULT, Pmode, index,
10916 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10917 gen_rtx (LABEL_REF, Pmode, table_label));
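/* That is, the entry address is TABLE_LABEL + INDEX * entry-size,
where the entry size is GET_MODE_SIZE (CASE_VECTOR_MODE), e.g. 4
if the vector elements are SImode. */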
10918 #ifdef PIC_CASE_VECTOR_ADDRESS
10919 if (flag_pic)
10920 index = PIC_CASE_VECTOR_ADDRESS (index);
10921 else
10922 #endif
10923 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10924 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10925 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10926 RTX_UNCHANGING_P (vector) = 1;
10927 convert_move (temp, vector, 0);
10928
10929 emit_jump_insn (gen_tablejump (temp, table_label));
10930
10931 #ifndef CASE_VECTOR_PC_RELATIVE
10932 /* If we are generating PIC code or if the table is PC-relative, the
10933 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10934 if (! flag_pic)
10935 emit_barrier ();
10936 #endif
10937 }
10938
10939 #endif /* HAVE_tablejump */
10940
10941
10942 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
10943 to that value is on the top of the stack. The resulting type is TYPE, and
10944 the source declaration is DECL. */
10945
10946 void
10947 bc_load_memory (type, decl)
10948 tree type, decl;
10949 {
10950 enum bytecode_opcode opcode;
10951
10952
10953 /* Bit fields are special. We only know about signed and
10954 unsigned ints, and enums. The latter are treated as
10955 signed integers. */
10956
10957 if (DECL_BIT_FIELD (decl))
10958 if (TREE_CODE (type) == ENUMERAL_TYPE
10959 || TREE_CODE (type) == INTEGER_TYPE)
10960 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10961 else
10962 abort ();
10963 else
10964 /* See corresponding comment in bc_store_memory. */
10965 if (TYPE_MODE (type) == BLKmode
10966 || TYPE_MODE (type) == VOIDmode)
10967 return;
10968 else
10969 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
10970
10971 if (opcode == neverneverland)
10972 abort ();
10973
10974 bc_emit_bytecode (opcode);
10975
10976 #ifdef DEBUG_PRINT_CODE
10977 fputc ('\n', stderr);
10978 #endif
10979 }
10980
10981
10982 /* Store the contents of the second stack slot to the address in the
10983 top stack slot. DECL is the declaration of the destination and is used
10984 to determine whether we're dealing with a bitfield. */
10985
10986 void
10987 bc_store_memory (type, decl)
10988 tree type, decl;
10989 {
10990 enum bytecode_opcode opcode;
10991
10992
10993 if (DECL_BIT_FIELD (decl))
10994 {
10995 if (TREE_CODE (type) == ENUMERAL_TYPE
10996 || TREE_CODE (type) == INTEGER_TYPE)
10997 opcode = sstoreBI;
10998 else
10999 abort ();
11000 }
11001 else
11002 if (TYPE_MODE (type) == BLKmode)
11003 {
11004 /* Copy structure. This expands to a block copy instruction, storeBLK.
11005 In addition to the arguments expected by the other store instructions,
11006 it also expects a type size (SImode) on top of the stack, which is the
11007 structure size in size units (usually bytes). The first two arguments
11008 are already on the stack, so we just put the size on level 1. In some
11009 languages the size may be variable; this is why we don't encode it as a
11010 storeBLK literal, but rather treat it as a full-fledged expression. */
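/* Sketch of the stack just before the storeBLK executes (top last):
..., value to store, destination address, size. */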
11011
11012 bc_expand_expr (TYPE_SIZE (type));
11013 opcode = storeBLK;
11014 }
11015 else
11016 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11017
11018 if (opcode == neverneverland)
11019 abort ();
11020
11021 bc_emit_bytecode (opcode);
11022
11023 #ifdef DEBUG_PRINT_CODE
11024 fputc ('\n', stderr);
11025 #endif
11026 }
11027
11028
11029 /* Allocate local stack space sufficient to hold a value of the given
11030 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11031 integral power of 2. A special case is locals of type VOID, which
11032 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11033 remapped into the corresponding attribute of SI. */
11034
11035 rtx
11036 bc_allocate_local (size, alignment)
11037 int size, alignment;
11038 {
11039 rtx retval;
11040 int byte_alignment;
11041
11042 if (size < 0)
11043 abort ();
11044
11045 /* Normalize size and alignment */
11046 if (!size)
11047 size = UNITS_PER_WORD;
11048
11049 if (alignment < BITS_PER_UNIT)
11050 byte_alignment = 1 << (INT_ALIGN - 1);
11051 else
11052 /* Align */
11053 byte_alignment = alignment / BITS_PER_UNIT;
11054
11055 if (local_vars_size & (byte_alignment - 1))
11056 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
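/* E.g. a LOCAL_VARS_SIZE of 6 with BYTE_ALIGNMENT 4 is rounded up
to 8 before the new local is placed. */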
11057
11058 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11059 local_vars_size += size;
11060
11061 return retval;
11062 }
11063
11064
11065 /* Allocate variable-sized local array. Variable-sized arrays are
11066 actually pointers to the address in memory where they are stored. */
11067
11068 rtx
11069 bc_allocate_variable_array (size)
11070 tree size;
11071 {
11072 rtx retval;
11073 const int ptralign = (1 << (PTR_ALIGN - 1));
11074
11075 /* Align pointer */
11076 if (local_vars_size & ptralign)
11077 local_vars_size += ptralign - (local_vars_size & ptralign);
11078
11079 /* Note down the local space needed (a pointer to the block), and
11080 return a dummy rtx. */
11081
11082 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11083 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11084 return retval;
11085 }
11086
11087
11088 /* Push the machine address for the given external variable offset. */
11089
11090 void
11091 bc_load_externaddr (externaddr)
11092 rtx externaddr;
11093 {
11094 bc_emit_bytecode (constP);
11095 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11096 BYTECODE_BC_LABEL (externaddr)->offset);
11097
11098 #ifdef DEBUG_PRINT_CODE
11099 fputc ('\n', stderr);
11100 #endif
11101 }
11102
11103
11104 /* Like above, but expects an IDENTIFIER. */
11105
11106 void
11107 bc_load_externaddr_id (id, offset)
11108 tree id;
11109 int offset;
11110 {
11111 if (!IDENTIFIER_POINTER (id))
11112 abort ();
11113
11114 bc_emit_bytecode (constP);
11115 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11116
11117 #ifdef DEBUG_PRINT_CODE
11118 fputc ('\n', stderr);
11119 #endif
11120 }
11121
11122
11123 /* Push the machine address for the given local variable offset. */
11124
11125 void
11126 bc_load_localaddr (localaddr)
11127 rtx localaddr;
11128 {
11129 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11130 }
11131
11132
11133 /* Push the machine address for the given parameter offset.
11134 NOTE: offset is in bits. */
11135
11136 void
11137 bc_load_parmaddr (parmaddr)
11138 rtx parmaddr;
11139 {
11140 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11141 / BITS_PER_UNIT));
11142 }
11143
11144
11145 /* Convert a[i] into *(a + i). */
11146
11147 tree
11148 bc_canonicalize_array_ref (exp)
11149 tree exp;
11150 {
11151 tree type = TREE_TYPE (exp);
11152 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11153 TREE_OPERAND (exp, 0));
11154 tree index = TREE_OPERAND (exp, 1);
11155
11156
11157 /* Convert the integer argument to a type the same size as a pointer
11158 so the multiply won't overflow spuriously. */
11159
11160 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11161 index = convert (type_for_size (POINTER_SIZE, 0), index);
11162
11163 /* The array address isn't volatile even if the array is.
11164 (Of course this isn't terribly relevant since the bytecode
11165 translator treats nearly everything as volatile anyway.) */
11166 TREE_THIS_VOLATILE (array_adr) = 0;
11167
11168 return build1 (INDIRECT_REF, type,
11169 fold (build (PLUS_EXPR,
11170 TYPE_POINTER_TO (type),
11171 array_adr,
11172 fold (build (MULT_EXPR,
11173 TYPE_POINTER_TO (type),
11174 index,
11175 size_in_bytes (type))))));
11176 }
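/* E.g. for "int a[10]", "a[i]" is rewritten in tree form as
"*(&a + i * sizeof (int))", with I first widened to the pointer
size so the multiplication cannot overflow spuriously. */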
11177
11178
11179 /* Load the address of the component referenced by the given
11180 COMPONENT_REF expression.
11181
11182 Returns innermost lvalue. */
11183
11184 tree
11185 bc_expand_component_address (exp)
11186 tree exp;
11187 {
11188 tree tem, chain;
11189 enum machine_mode mode;
11190 int bitpos = 0;
11191 HOST_WIDE_INT SIval;
11192
11193
11194 tem = TREE_OPERAND (exp, 1);
11195 mode = DECL_MODE (tem);
11196
11197
11198 /* Compute cumulative bit offset for nested component refs
11199 and array refs, and find the ultimate containing object. */
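/* E.g. for "x.a.b", BITPOS accumulates the bit offset of B within A
plus that of A within X, and TEM ends up denoting X itself. */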
11200
11201 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11202 {
11203 if (TREE_CODE (tem) == COMPONENT_REF)
11204 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11205 else
11206 if (TREE_CODE (tem) == ARRAY_REF
11207 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11208 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11209
11210 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11211 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11212 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11213 else
11214 break;
11215 }
11216
11217 bc_expand_expr (tem);
11218
11219
11220 /* For bitfields also push their offset and size */
11221 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11222 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
11223 else
11224 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
11225 bc_emit_instruction (addconstPSI, SIval);
11226
11227 return (TREE_OPERAND (exp, 1));
11228 }
11229
11230
11231 /* Emit code to push two SI constants */
11232
11233 void
11234 bc_push_offset_and_size (offset, size)
11235 HOST_WIDE_INT offset, size;
11236 {
11237 bc_emit_instruction (constSI, offset);
11238 bc_emit_instruction (constSI, size);
11239 }
11240
11241
11242 /* Emit byte code to push the address of the given lvalue expression to
11243 the stack. If it's a bit field, we also push offset and size info.
11244
11245 Returns innermost component, which allows us to determine not only
11246 its type, but also whether it's a bitfield. */
11247
11248 tree
11249 bc_expand_address (exp)
11250 tree exp;
11251 {
11252 /* Safeguard */
11253 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11254 return (exp);
11255
11256
11257 switch (TREE_CODE (exp))
11258 {
11259 case ARRAY_REF:
11260
11261 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11262
11263 case COMPONENT_REF:
11264
11265 return (bc_expand_component_address (exp));
11266
11267 case INDIRECT_REF:
11268
11269 bc_expand_expr (TREE_OPERAND (exp, 0));
11270
11271 /* For variable-sized types: retrieve pointer. Sometimes the
11272 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11273 also make sure we have an operand, just in case... */
11274
11275 if (TREE_OPERAND (exp, 0)
11276 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11277 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11278 bc_emit_instruction (loadP);
11279
11280 /* If packed, also return offset and size */
11281 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11282
11283 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11284 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11285
11286 return (TREE_OPERAND (exp, 0));
11287
11288 case FUNCTION_DECL:
11289
11290 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11291 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11292 break;
11293
11294 case PARM_DECL:
11295
11296 bc_load_parmaddr (DECL_RTL (exp));
11297
11298 /* For variable-sized types: retrieve pointer */
11299 if (TYPE_SIZE (TREE_TYPE (exp))
11300 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11301 bc_emit_instruction (loadP);
11302
11303 /* If packed, also return offset and size */
11304 if (DECL_BIT_FIELD (exp))
11305 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11306 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11307
11308 break;
11309
11310 case RESULT_DECL:
11311
11312 bc_emit_instruction (returnP);
11313 break;
11314
11315 case VAR_DECL:
11316
11317 #if 0
11318 if (BYTECODE_LABEL (DECL_RTL (exp)))
11319 bc_load_externaddr (DECL_RTL (exp));
11320 #endif
11321
11322 if (DECL_EXTERNAL (exp))
11323 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11324 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11325 else
11326 bc_load_localaddr (DECL_RTL (exp));
11327
11328 /* For variable-sized types: retrieve pointer */
11329 if (TYPE_SIZE (TREE_TYPE (exp))
11330 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11331 bc_emit_instruction (loadP);
11332
11333 /* If packed, also return offset and size */
11334 if (DECL_BIT_FIELD (exp))
11335 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11336 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11337
11338 break;
11339
11340 case STRING_CST:
11341 {
11342 rtx r;
11343
11344 bc_emit_bytecode (constP);
11345 r = output_constant_def (exp);
11346 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11347
11348 #ifdef DEBUG_PRINT_CODE
11349 fputc ('\n', stderr);
11350 #endif
11351 }
11352 break;
11353
11354 default:
11355
11356 abort ();
11357 break;
11358 }
11359
11360 /* Most lvalues don't have components. */
11361 return (exp);
11362 }
11363
11364
11365 /* Return a type code to be used by the runtime support in handling
11366 parameter passing. The type code consists of the machine mode
11367 plus the minimal alignment shifted left 8 bits. */
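/* For example, a 32-bit int aligned to 32 bits would yield
(int) SImode | 32 << 8; the numeric value of the mode itself is
target- and version-specific. */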
11368
11369 tree
11370 bc_runtime_type_code (type)
11371 tree type;
11372 {
11373 int val;
11374
11375 switch (TREE_CODE (type))
11376 {
11377 case VOID_TYPE:
11378 case INTEGER_TYPE:
11379 case REAL_TYPE:
11380 case COMPLEX_TYPE:
11381 case ENUMERAL_TYPE:
11382 case POINTER_TYPE:
11383 case RECORD_TYPE:
11384
11385 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11386 break;
11387
11388 case ERROR_MARK:
11389
11390 val = 0;
11391 break;
11392
11393 default:
11394
11395 abort ();
11396 }
11397 return build_int_2 (val, 0);
11398 }
11399
11400
11401 /* Generate constructor label */
11402
11403 char *
11404 bc_gen_constr_label ()
11405 {
11406 static int label_counter;
11407 static char label[20];
11408
11409 sprintf (label, "*LR%d", label_counter++);
11410
11411 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11412 }
11413
11414
11415 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11416 expand the constructor data as static data, and push a pointer to it.
11417 The pointer is put in the pointer table and is retrieved by a constP
11418 bytecode instruction. We then loop and store each constructor member in
11419 the corresponding component. Finally, we return the original pointer on
11420 the stack. */
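/* Sketch of the emitted bytecode for a non-literal constructor that
does not initialize every member:
constP <ptr>; duplicate; constSI <size>; clearBLK;
followed by one store sequence per initialized member. */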
11421
11422 void
11423 bc_expand_constructor (constr)
11424 tree constr;
11425 {
11426 char *l;
11427 HOST_WIDE_INT ptroffs;
11428 rtx constr_rtx;
11429
11430
11431 /* Literal constructors are handled as constants, whereas
11432 non-literals are evaluated and stored element by element
11433 into the data segment. */
11434
11435 /* Allocate space in the proper segment and push a pointer to it
11436 on the stack. */
11437
11438 l = bc_gen_constr_label ();
11439
11440 if (TREE_CONSTANT (constr))
11441 {
11442 text_section ();
11443
11444 bc_emit_const_labeldef (l);
11445 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11446 }
11447 else
11448 {
11449 data_section ();
11450
11451 bc_emit_data_labeldef (l);
11452 bc_output_data_constructor (constr);
11453 }
11454
11455
11456 /* Add reference to pointer table and recall pointer to stack;
11457 this code is common for both types of constructors: literals
11458 and non-literals. */
11459
11460 ptroffs = bc_define_pointer (l);
11461 bc_emit_instruction (constP, ptroffs);
11462
11463 /* This is all that has to be done if it's a literal. */
11464 if (TREE_CONSTANT (constr))
11465 return;
11466
11467
11468 /* At this point, we have the pointer to the structure on top of the stack.
11469 Generate sequences of store_memory calls for the constructor. */
11470
11471 /* constructor type is structure */
11472 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11473 {
11474 register tree elt;
11475
11476 /* If the constructor has fewer fields than the structure,
11477 clear the whole structure first. */
11478
11479 if (list_length (CONSTRUCTOR_ELTS (constr))
11480 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11481 {
11482 bc_emit_instruction (duplicate);
11483 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11484 bc_emit_instruction (clearBLK);
11485 }
11486
11487 /* Store each element of the constructor into the corresponding
11488 field of TARGET. */
11489
11490 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11491 {
11492 register tree field = TREE_PURPOSE (elt);
11493 register enum machine_mode mode;
11494 int bitsize;
11495 int bitpos;
11496 int unsignedp;
11497
11498 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11499 mode = DECL_MODE (field);
11500 unsignedp = TREE_UNSIGNED (field);
11501
11502 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11503
11504 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11505 /* The alignment of TARGET is
11506 at least what its type requires. */
11507 VOIDmode, 0,
11508 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11509 int_size_in_bytes (TREE_TYPE (constr)));
11510 }
11511 }
11512 else
11513
11514 /* Constructor type is array */
11515 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11516 {
11517 register tree elt;
11518 register int i;
11519 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11520 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11521 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11522 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11523
11524 /* If the constructor has fewer elements than the array,
11525 clear the whole array first. */
11526
11527 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11528 {
11529 bc_emit_instruction (duplicate);
11530 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11531 bc_emit_instruction (clearBLK);
11532 }
11533
11534
11535 /* Store each element of the constructor into the corresponding
11536 element of TARGET, determined by counting the elements. */
11537
11538 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11539 elt;
11540 elt = TREE_CHAIN (elt), i++)
11541 {
11542 register enum machine_mode mode;
11543 int bitsize;
11544 int bitpos;
11545 int unsignedp;
11546
11547 mode = TYPE_MODE (elttype);
11548 bitsize = GET_MODE_BITSIZE (mode);
11549 unsignedp = TREE_UNSIGNED (elttype);
11550
11551 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11552 /* * TYPE_SIZE_UNIT (elttype) */ );
11553
11554 bc_store_field (elt, bitsize, bitpos, mode,
11555 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11556 /* The alignment of TARGET is
11557 at least what its type requires. */
11558 VOIDmode, 0,
11559 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11560 int_size_in_bytes (TREE_TYPE (constr)));
11561 }
11562
11563 }
11564 }
11565
11566
11567 /* Store the value of EXP (an expression tree) into member FIELD of
11568 structure at address on stack, which has type TYPE, mode MODE and
11569 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11570 structure.
11571
11572 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11573 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11574
11575 void
11576 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11577 value_mode, unsignedp, align, total_size)
11578 int bitsize, bitpos;
11579 enum machine_mode mode;
11580 tree field, exp, type;
11581 enum machine_mode value_mode;
11582 int unsignedp;
11583 int align;
11584 int total_size;
11585 {
11586
11587 /* Expand expression and copy pointer */
11588 bc_expand_expr (exp);
11589 bc_emit_instruction (over);
11590
11591
11592 /* If the component is a bit field, we cannot use addressing to access
11593 it. Use bit-field techniques to store in it. */
11594
11595 if (DECL_BIT_FIELD (field))
11596 {
11597 bc_store_bit_field (bitpos, bitsize, unsignedp);
11598 return;
11599 }
11600 else
11601 /* Not bit field */
11602 {
11603 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11604
11605 /* Advance pointer to the desired member */
11606 if (offset)
11607 bc_emit_instruction (addconstPSI, offset);
11608
11609 /* Store */
11610 bc_store_memory (type, field);
11611 }
11612 }
11613
11614
11615 /* Store SI/SU in bitfield */
11616
11617 void
11618 bc_store_bit_field (offset, size, unsignedp)
11619 int offset, size, unsignedp;
11620 {
11621 /* Push bitfield offset and size */
11622 bc_push_offset_and_size (offset, size);
11623
11624 /* Store */
11625 bc_emit_instruction (sstoreBI);
11626 }
11627
11628
11629 /* Load SI/SU from bitfield */
11630
11631 void
11632 bc_load_bit_field (offset, size, unsignedp)
11633 int offset, size, unsignedp;
11634 {
11635 /* Push bitfield offset and size */
11636 bc_push_offset_and_size (offset, size);
11637
11638 /* Load: sign-extend if signed, else zero-extend */
11639 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11640 }
11641
11642
11643 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11644 (adjust stack pointer upwards), negative means add that number of
11645 levels (adjust the stack pointer downwards). Only positive values
11646 normally make sense. */
11647
11648 void
11649 bc_adjust_stack (nlevels)
11650 int nlevels;
11651 {
11652 switch (nlevels)
11653 {
11654 case 0:
11655 break;
11656
11657 case 2:
11658 bc_emit_instruction (drop);
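/* Falls through: dropping two levels is simply two single drops. */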
11659
11660 case 1:
11661 bc_emit_instruction (drop);
11662 break;
11663
11664 default:
11665
11666 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11667 stack_depth -= nlevels;
11668 }
11669
11670 #if defined (VALIDATE_STACK_FOR_BC)
11671 VALIDATE_STACK_FOR_BC ();
11672 #endif
11673 }