1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "machmode.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "obstack.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
34 #include "expr.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "typeclass.h"
39
40 #include "bytecode.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
43 #include "bc-optab.h"
44 #include "bc-emit.h"
45
46
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
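/* For example, CEIL (10, 4) == 3 while 10 / 4 == 2; it is used below as
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) to compute how many
   words are needed to hold a value of a given byte size.  */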
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #endif
60
61 #endif
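/* In other words: when exactly one of STACK_GROWS_DOWNWARD and
   ARGS_GROW_DOWNWARD is defined, the stack and the argument area grow in
   opposite directions, so PUSH_ARGS_REVERSED is defined and arguments are
   pushed from last to first; otherwise they are processed first to last.  */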
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
81
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
86
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
90
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
96
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
100
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
104 of TARGET_EXPRs. */
105 int target_temp_slot_level;
106
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
109 returned. */
110 static rtx saveregs_value;
111
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
114
115 /* This structure is used by move_by_pieces to describe the move to
116 be performed. */
117
118 struct move_by_pieces
119 {
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
124 int to_struct;
125 rtx from;
126 rtx from_addr;
127 int autinc_from;
128 int explicit_inc_from;
129 int from_struct;
130 int len;
131 int offset;
132 int reverse;
133 };
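/* Roughly: TO and FROM are the BLKmode MEMs being copied; TO_ADDR and
   FROM_ADDR their (possibly register-copied) addresses; AUTINC_TO and
   AUTINC_FROM nonzero when the address is an auto-increment expression;
   EXPLICIT_INC_TO and EXPLICIT_INC_FROM -1, 0 or +1 when an explicit add
   must be emitted around each piece; TO_STRUCT and FROM_STRUCT the
   MEM_IN_STRUCT_P flags to copy; LEN the bytes still to move; OFFSET the
   current displacement; and REVERSE nonzero when moving from the end
   backwards.  See move_by_pieces and move_by_pieces_1 below.  */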
134
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138 struct clear_by_pieces
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 int to_struct;
145 int len;
146 int offset;
147 int reverse;
148 };
149
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
153
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
159
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static rtx var_rtx PROTO((tree));
181 static int get_pointer_alignment PROTO((tree, unsigned));
182 static tree string_constant PROTO((tree, tree *));
183 static tree c_strlen PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 void bc_expand_increment PROTO((struct increment_operator *, tree));
194 rtx bc_allocate_local PROTO((int, int));
195 void bc_store_memory PROTO((tree, tree));
196 tree bc_expand_component_address PROTO((tree));
197 tree bc_expand_address PROTO((tree));
198 void bc_expand_constructor PROTO((tree));
199 void bc_adjust_stack PROTO((int));
200 tree bc_canonicalize_array_ref PROTO((tree));
201 void bc_load_memory PROTO((tree, tree));
202 void bc_load_externaddr PROTO((rtx));
203 void bc_load_externaddr_id PROTO((tree, int));
204 void bc_load_localaddr PROTO((rtx));
205 void bc_load_parmaddr PROTO((rtx));
206 static void preexpand_calls PROTO((tree));
207 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
208 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
209 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
214 static tree defer_cleanups_to PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
216
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
220
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
223
224 /* MOVE_RATIO is the number of move instructions that is better than
225 a block move. */
226
227 #ifndef MOVE_RATIO
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
229 #define MOVE_RATIO 2
230 #else
231 /* A value of around 6 would minimize code size; infinity would minimize
232 execution time. */
233 #define MOVE_RATIO 15
234 #endif
235 #endif
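/* A rough example with MOVE_RATIO of 15: an 8-byte, word-aligned copy on a
   32-bit target takes only two SImode moves, so emit_block_move below does
   it by pieces; a 256-byte copy would need far more than 15 moves and
   falls back to a movstr pattern or a library call instead.  */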
236
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
247 #endif
248
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
252 #endif
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
255 #endif
256 \f
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261
262 /* Initialize maps used to convert modes to const, load, and store
263 bytecodes. */
264
265 void
266 bc_init_mode_to_opcode_maps ()
267 {
268 int mode;
269
270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
274
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
279
280 #include "modemap.def"
281 #undef DEF_MODEMAP
282 }
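/* Each DEF_MODEMAP line in modemap.def supplies, for one machine mode, the
   bytecode opcodes for pushing a constant of that mode and for loading and
   storing a value of that mode.  A hypothetical entry such as
   DEF_MODEMAP (SImode, ..., ..., constSI, loadSI, storeSI) would be
   recorded by the expansion above in the three maps at index (int) SImode;
   modes with no entry keep the placeholder neverneverland.  */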
283 \f
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
286
287 void
288 init_expr_once ()
289 {
290 rtx insn, pat;
291 enum machine_mode mode;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
297
298 start_sequence ();
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
301
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
304 {
305 int regno;
306 rtx reg;
307 int num_clobbers;
308
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
311 PUT_MODE (mem1, mode);
312
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
315
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
319 regno++)
320 {
321 if (! HARD_REGNO_MODE_OK (regno, mode))
322 continue;
323
324 reg = gen_rtx (REG, mode, regno);
325
326 SET_SRC (pat) = mem;
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
330
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
335
336 SET_SRC (pat) = reg;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
340
341 SET_SRC (pat) = reg;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
345 }
346 }
347
348 end_sequence ();
349 }
350
351 /* This is run at the start of compiling a function. */
352
353 void
354 init_expr ()
355 {
356 init_queue ();
357
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
361 saveregs_value = 0;
362 apply_args_value = 0;
363 forced_labels = 0;
364 }
365
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
368
369 void
370 save_expr_status (p)
371 struct function *p;
372 {
373 /* Instead of saving the postincrement queue, empty it. */
374 emit_queue ();
375
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
380 p->apply_args_value = apply_args_value;
381 p->forced_labels = forced_labels;
382
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
386 saveregs_value = 0;
387 apply_args_value = 0;
388 forced_labels = 0;
389 }
390
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
393
394 void
395 restore_expr_status (p)
396 struct function *p;
397 {
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
402 apply_args_value = p->apply_args_value;
403 forced_labels = p->forced_labels;
404 }
405 \f
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
408
409 static rtx pending_chain;
410
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
414
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
417
418 static rtx
419 enqueue_insn (var, body)
420 rtx var, body;
421 {
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
423 var, NULL_RTX, NULL_RTX, body, pending_chain);
424 return pending_chain;
425 }
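/* A sketch of the intended use: when expanding a post-increment such as
   "x++", the caller builds BODY as the rtl that performs the increment
   (for instance, a SET adding 1 to the rtx for X) and hands it to
   enqueue_insn instead of emitting it.  The QUEUED rtx returned then
   stands for the old value of X until emit_queue flushes the pending
   increments.  */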
426
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
433
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
437
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
441
442 rtx
443 protect_from_queue (x, modify)
444 register rtx x;
445 int modify;
446 {
447 register RTX_CODE code = GET_CODE (x);
448
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
452 return x;
453 #endif
454
455 if (code != QUEUED)
456 {
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
461 shared. */
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
464 {
465 register rtx y = XEXP (x, 0);
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
467
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
471
472 if (QUEUED_INSN (y))
473 {
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
476 QUEUED_INSN (y));
477 return temp;
478 }
479 return new;
480 }
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
483 if (code == MEM)
484 {
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
487 {
488 x = copy_rtx (x);
489 XEXP (x, 0) = tem;
490 }
491 }
492 else if (code == PLUS || code == MULT)
493 {
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
497 {
498 x = copy_rtx (x);
499 XEXP (x, 0) = new0;
500 XEXP (x, 1) = new1;
501 }
502 }
503 return x;
504 }
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
509 use that copy. */
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
516 QUEUED_INSN (x));
517 return QUEUED_COPY (x);
518 }
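/* Typical use, roughly: filter every operand that might contain a QUEUED
   before putting it into an insn, passing MODIFY nonzero only for the
   operand that will be stored into, e.g.

	from = protect_from_queue (from, 0);
	to = protect_from_queue (to, 1);

   which is what convert_move does with its operands before emitting
   anything.  */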
519
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
524
525 static int
526 queued_subexp_p (x)
527 rtx x;
528 {
529 register enum rtx_code code = GET_CODE (x);
530 switch (code)
531 {
532 case QUEUED:
533 return 1;
534 case MEM:
535 return queued_subexp_p (XEXP (x, 0));
536 case MULT:
537 case PLUS:
538 case MINUS:
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
541 }
542 return 0;
543 }
544
545 /* Perform all the pending incrementations. */
546
547 void
548 emit_queue ()
549 {
550 register rtx p;
551 while (p = pending_chain)
552 {
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
555 }
556 }
557
558 static void
559 init_queue ()
560 {
561 if (pending_chain)
562 abort ();
563 }
564 \f
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
569
570 void
571 convert_move (to, from, unsignedp)
572 register rtx to, from;
573 int unsignedp;
574 {
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
579 enum insn_code code;
580 rtx libcall;
581
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
584
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
587
588 if (to_real != from_real)
589 abort ();
590
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
593 TO here. */
594
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
600
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
602 abort ();
603
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
606 {
607 emit_move_insn (to, from);
608 return;
609 }
610
611 if (to_real)
612 {
613 rtx value;
614
615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
616 {
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
619 != CODE_FOR_nothing)
620 {
621 emit_unop_insn (code, to, from, UNKNOWN);
622 return;
623 }
624 }
625
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709 return;
710 }
711 #endif
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716 return;
717 }
718 #endif
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 {
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 return;
724 }
725 #endif
726
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 {
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 {
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738 return;
739 }
740 #endif
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 {
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745 return;
746 }
747 #endif
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 {
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752 return;
753 }
754 #endif
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 {
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759 return;
760 }
761 #endif
762
763 libcall = (rtx) 0;
764 switch (from_mode)
765 {
766 case SFmode:
767 switch (to_mode)
768 {
769 case DFmode:
770 libcall = extendsfdf2_libfunc;
771 break;
772
773 case XFmode:
774 libcall = extendsfxf2_libfunc;
775 break;
776
777 case TFmode:
778 libcall = extendsftf2_libfunc;
779 break;
780 }
781 break;
782
783 case DFmode:
784 switch (to_mode)
785 {
786 case SFmode:
787 libcall = truncdfsf2_libfunc;
788 break;
789
790 case XFmode:
791 libcall = extenddfxf2_libfunc;
792 break;
793
794 case TFmode:
795 libcall = extenddftf2_libfunc;
796 break;
797 }
798 break;
799
800 case XFmode:
801 switch (to_mode)
802 {
803 case SFmode:
804 libcall = truncxfsf2_libfunc;
805 break;
806
807 case DFmode:
808 libcall = truncxfdf2_libfunc;
809 break;
810 }
811 break;
812
813 case TFmode:
814 switch (to_mode)
815 {
816 case SFmode:
817 libcall = trunctfsf2_libfunc;
818 break;
819
820 case DFmode:
821 libcall = trunctfdf2_libfunc;
822 break;
823 }
824 break;
825 }
826
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
829 abort ();
830
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
832 1, from, from_mode);
833 emit_move_insn (to, value);
834 return;
835 }
836
837 /* Now both modes are integers. */
838
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
842 {
843 rtx insns;
844 rtx lowpart;
845 rtx fill_value;
846 rtx lowfrom;
847 int i;
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
850
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
853 != CODE_FOR_nothing)
854 {
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
862 return;
863 }
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
868 {
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
874 return;
875 }
876
877 /* No special multiword conversion insn; do it by hand. */
878 start_sequence ();
879
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
882
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
885
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
889 else
890 lowpart_mode = from_mode;
891
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
893
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
896
897 /* Compute the value to put in each remaining word. */
898 if (unsignedp)
899 fill_value = const0_rtx;
900 else
901 {
902 #ifdef HAVE_slt
903 if (HAVE_slt
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
906 {
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
908 lowpart_mode, 0, 0);
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
911 }
912 else
913 #endif
914 {
915 fill_value
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
918 NULL_RTX, 0);
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
920 }
921 }
922
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
925 {
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
928
929 if (subword == 0)
930 abort ();
931
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
934 }
935
936 insns = get_insns ();
937 end_sequence ();
938
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
941 return;
942 }
943
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
947 {
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
956 return;
957 }
958
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PSImode)
961 {
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
964
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
967 {
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
969 return;
970 }
971 #endif /* HAVE_truncsipsi2 */
972 abort ();
973 }
974
975 if (from_mode == PSImode)
976 {
977 if (to_mode != SImode)
978 {
979 from = convert_to_mode (SImode, from, unsignedp);
980 from_mode = SImode;
981 }
982 else
983 {
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
986 {
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
988 return;
989 }
990 #endif /* HAVE_extendpsisi2 */
991 abort ();
992 }
993 }
994
995 if (to_mode == PDImode)
996 {
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
999
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1002 {
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1004 return;
1005 }
1006 #endif /* HAVE_truncdipdi2 */
1007 abort ();
1008 }
1009
1010 if (from_mode == PDImode)
1011 {
1012 if (to_mode != DImode)
1013 {
1014 from = convert_to_mode (DImode, from, unsignedp);
1015 from_mode = DImode;
1016 }
1017 else
1018 {
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1021 {
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1023 return;
1024 }
1025 #endif /* HAVE_extendpdidi2 */
1026 abort ();
1027 }
1028 }
1029
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1032
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036 GET_MODE_BITSIZE (from_mode)))
1037 {
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1049 return;
1050 }
1051
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054 {
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1058 {
1059 emit_unop_insn (code, to, from, equiv_code);
1060 return;
1061 }
1062 else
1063 {
1064 enum machine_mode intermediate;
1065
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1075 {
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1078 return;
1079 }
1080
1081 /* No suitable intermediate mode. */
1082 abort ();
1083 }
1084 }
1085
1086 /* Support special truncate insns for certain modes. */
1087
1088 if (from_mode == DImode && to_mode == SImode)
1089 {
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1092 {
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1094 return;
1095 }
1096 #endif
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1098 return;
1099 }
1100
1101 if (from_mode == DImode && to_mode == HImode)
1102 {
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1105 {
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1107 return;
1108 }
1109 #endif
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1111 return;
1112 }
1113
1114 if (from_mode == DImode && to_mode == QImode)
1115 {
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1118 {
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1120 return;
1121 }
1122 #endif
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1124 return;
1125 }
1126
1127 if (from_mode == SImode && to_mode == HImode)
1128 {
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1131 {
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1133 return;
1134 }
1135 #endif
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1137 return;
1138 }
1139
1140 if (from_mode == SImode && to_mode == QImode)
1141 {
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1144 {
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1146 return;
1147 }
1148 #endif
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1150 return;
1151 }
1152
1153 if (from_mode == HImode && to_mode == QImode)
1154 {
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1157 {
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1159 return;
1160 }
1161 #endif
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1163 return;
1164 }
1165
1166 if (from_mode == TImode && to_mode == DImode)
1167 {
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1170 {
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1172 return;
1173 }
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1177 }
1178
1179 if (from_mode == TImode && to_mode == SImode)
1180 {
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1183 {
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1185 return;
1186 }
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1190 }
1191
1192 if (from_mode == TImode && to_mode == HImode)
1193 {
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1196 {
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1198 return;
1199 }
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1203 }
1204
1205 if (from_mode == TImode && to_mode == QImode)
1206 {
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1209 {
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1211 return;
1212 }
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1216 }
1217
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1222 {
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1225 return;
1226 }
1227
1228 /* Mode combination is not recognized. */
1229 abort ();
1230 }
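/* A minimal example of calling convert_move: to sign-extend an SImode
   register into a DImode register one could write, roughly,

	rtx di_reg = gen_reg_rtx (DImode);
	convert_move (di_reg, si_reg, 0);

   passing UNSIGNEDP as 1 instead requests zero-extension.  DI_REG and
   SI_REG are illustrative names, not ones used in this file.  */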
1231
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1238
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1241
1242 rtx
1243 convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1245 rtx x;
1246 int unsignedp;
1247 {
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1249 }
1250
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1255
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1258
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1260
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1263
1264 rtx
1265 convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1267 rtx x;
1268 int unsignedp;
1269 {
1270 register rtx temp;
1271
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1274
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
1279
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1282
1283 if (mode == oldmode)
1284 return x;
1285
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1291
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1295 {
1296 HOST_WIDE_INT val = INTVAL (x);
1297
1298 if (oldmode != VOIDmode
1299 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1300 {
1301 int width = GET_MODE_BITSIZE (oldmode);
1302
1303 /* We need to zero extend VAL. */
1304 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1305 }
1306
1307 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1308 }
1309
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1314
1315 if ((GET_CODE (x) == CONST_INT
1316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1317 || (GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_CLASS (oldmode) == MODE_INT
1319 && (GET_CODE (x) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1321 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1322 && direct_load[(int) mode])
1323 || (GET_CODE (x) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1325 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1326 {
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1331 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1332 {
1333 HOST_WIDE_INT val = INTVAL (x);
1334 int width = GET_MODE_BITSIZE (oldmode);
1335
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1339 if (! unsignedp
1340 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1341 val |= (HOST_WIDE_INT) (-1) << width;
1342
1343 return GEN_INT (val);
1344 }
1345
1346 return gen_lowpart (mode, x);
1347 }
1348
1349 temp = gen_reg_rtx (mode);
1350 convert_move (temp, x, unsignedp);
1351 return temp;
1352 }
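/* A worked example of the CONST_INT handling above: converting
   (const_int -1) from QImode to HImode as an unsigned value must not just
   sign-extend.  With OLDMODE known to be QImode, the value is masked to
   its low 8 bits, so convert_modes (HImode, QImode, GEN_INT (-1), 1)
   yields (const_int 255) rather than (const_int -1).  */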
1353 \f
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1359
1360 static void
1361 move_by_pieces (to, from, len, align)
1362 rtx to, from;
1363 int len, align;
1364 {
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1367 int max_size = MOVE_MAX + 1;
1368
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1381
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1388
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data.autinc_from && data.autinc_to)
1396 && move_by_pieces_ninsns (len, align) > 2)
1397 {
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data.reverse && ! data.autinc_from)
1400 {
1401 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1402 data.autinc_from = 1;
1403 data.explicit_inc_from = -1;
1404 }
1405 #endif
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data.autinc_from)
1408 {
1409 data.from_addr = copy_addr_to_reg (from_addr);
1410 data.autinc_from = 1;
1411 data.explicit_inc_from = 1;
1412 }
1413 #endif
1414 if (!data.autinc_from && CONSTANT_P (from_addr))
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data.reverse && ! data.autinc_to)
1418 {
1419 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1420 data.autinc_to = 1;
1421 data.explicit_inc_to = -1;
1422 }
1423 #endif
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data.reverse && ! data.autinc_to)
1426 {
1427 data.to_addr = copy_addr_to_reg (to_addr);
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = 1;
1430 }
1431 #endif
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 }
1435
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1439
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1442
1443 while (max_size > 1)
1444 {
1445 enum machine_mode mode = VOIDmode, tmode;
1446 enum insn_code icode;
1447
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
1451 mode = tmode;
1452
1453 if (mode == VOIDmode)
1454 break;
1455
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1461
1462 max_size = GET_MODE_SIZE (mode);
1463 }
1464
1465 /* The code above should have handled everything. */
1466 if (data.len > 0)
1467 abort ();
1468 }
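/* A rough worked example: copying LEN == 7 bytes with 4-byte alignment on
   a 32-bit target becomes one SImode move, then one HImode move, then one
   QImode move, visiting the integer modes from widest to narrowest as in
   the loop above.  move_by_pieces_ninsns below runs the same arithmetic to
   predict that count (3) without emitting anything.  */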
1469
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
1472
1473 static int
1474 move_by_pieces_ninsns (l, align)
1475 unsigned int l;
1476 int align;
1477 {
1478 register int n_insns = 0;
1479 int max_size = MOVE_MAX + 1;
1480
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1483 align = MOVE_MAX;
1484
1485 while (max_size > 1)
1486 {
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1489
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1501 GET_MODE_SIZE (mode)))
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1503
1504 max_size = GET_MODE_SIZE (mode);
1505 }
1506
1507 return n_insns;
1508 }
1509
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1513
1514 static void
1515 move_by_pieces_1 (genfun, mode, data)
1516 rtx (*genfun) ();
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1519 {
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1522
1523 while (data->len >= size)
1524 {
1525 if (data->reverse) data->offset -= size;
1526
1527 to1 = (data->autinc_to
1528 ? gen_rtx (MEM, mode, data->to_addr)
1529 : change_address (data->to, mode,
1530 plus_constant (data->to_addr, data->offset)));
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1532 from1 =
1533 (data->autinc_from
1534 ? gen_rtx (MEM, mode, data->from_addr)
1535 : change_address (data->from, mode,
1536 plus_constant (data->from_addr, data->offset)));
1537 MEM_IN_STRUCT_P (from1) = data->from_struct;
1538
1539 #ifdef HAVE_PRE_DECREMENT
1540 if (data->explicit_inc_to < 0)
1541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1542 if (data->explicit_inc_from < 0)
1543 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1544 #endif
1545
1546 emit_insn ((*genfun) (to1, from1));
1547 #ifdef HAVE_POST_INCREMENT
1548 if (data->explicit_inc_to > 0)
1549 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1550 if (data->explicit_inc_from > 0)
1551 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1552 #endif
1553
1554 if (! data->reverse) data->offset += size;
1555
1556 data->len -= size;
1557 }
1558 }
1559 \f
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1563
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1565 with mode BLKmode.
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
1569
1570 void
1571 emit_block_move (x, y, size, align)
1572 rtx x, y;
1573 rtx size;
1574 int align;
1575 {
1576 if (GET_MODE (x) != BLKmode)
1577 abort ();
1578
1579 if (GET_MODE (y) != BLKmode)
1580 abort ();
1581
1582 x = protect_from_queue (x, 1);
1583 y = protect_from_queue (y, 0);
1584 size = protect_from_queue (size, 0);
1585
1586 if (GET_CODE (x) != MEM)
1587 abort ();
1588 if (GET_CODE (y) != MEM)
1589 abort ();
1590 if (size == 0)
1591 abort ();
1592
1593 if (GET_CODE (size) == CONST_INT
1594 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1595 move_by_pieces (x, y, INTVAL (size), align);
1596 else
1597 {
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage. */
1601
1602 rtx opalign = GEN_INT (align);
1603 enum machine_mode mode;
1604
1605 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1606 mode = GET_MODE_WIDER_MODE (mode))
1607 {
1608 enum insn_code code = movstr_optab[(int) mode];
1609
1610 if (code != CODE_FOR_nothing
1611 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
1613 returned by the macro, it will definitely be less than the
1614 actual mode mask. */
1615 && ((GET_CODE (size) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1617 <= GET_MODE_MASK (mode)))
1618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1619 && (insn_operand_predicate[(int) code][0] == 0
1620 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1621 && (insn_operand_predicate[(int) code][1] == 0
1622 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1623 && (insn_operand_predicate[(int) code][3] == 0
1624 || (*insn_operand_predicate[(int) code][3]) (opalign,
1625 VOIDmode)))
1626 {
1627 rtx op2;
1628 rtx last = get_last_insn ();
1629 rtx pat;
1630
1631 op2 = convert_to_mode (mode, size, 1);
1632 if (insn_operand_predicate[(int) code][2] != 0
1633 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1634 op2 = copy_to_mode_reg (mode, op2);
1635
1636 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1637 if (pat)
1638 {
1639 emit_insn (pat);
1640 return;
1641 }
1642 else
1643 delete_insns_since (last);
1644 }
1645 }
1646
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 emit_library_call (memcpy_libfunc, 0,
1649 VOIDmode, 3, XEXP (x, 0), Pmode,
1650 XEXP (y, 0), Pmode,
1651 convert_to_mode (TYPE_MODE (sizetype), size,
1652 TREE_UNSIGNED (sizetype)),
1653 TYPE_MODE (sizetype));
1654 #else
1655 emit_library_call (bcopy_libfunc, 0,
1656 VOIDmode, 3, XEXP (y, 0), Pmode,
1657 XEXP (x, 0), Pmode,
1658 convert_to_mode (TYPE_MODE (integer_type_node), size,
1659 TREE_UNSIGNED (integer_type_node)),
1660 TYPE_MODE (integer_type_node));
1661 #endif
1662 }
1663 }
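/* A minimal usage sketch: given two BLKmode MEMs DEST and SRC, a caller
   can copy N bytes known at compile time with

	emit_block_move (dest, src, GEN_INT (n), align);

   Small constant sizes (fewer than MOVE_RATIO piecewise moves) go through
   move_by_pieces, larger ones try each movstr pattern in turn, and
   anything left over becomes the memcpy (or bcopy) library call above.
   DEST, SRC and N are illustrative names, not ones used in this file.  */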
1664 \f
1665 /* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
1667
1668 void
1669 move_block_to_reg (regno, x, nregs, mode)
1670 int regno;
1671 rtx x;
1672 int nregs;
1673 enum machine_mode mode;
1674 {
1675 int i;
1676 rtx pat, last;
1677
1678 if (nregs == 0)
1679 return;
1680
1681 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1682 x = validize_mem (force_const_mem (mode, x));
1683
1684 /* See if the machine can do this with a load multiple insn. */
1685 #ifdef HAVE_load_multiple
1686 if (HAVE_load_multiple)
1687 {
1688 last = get_last_insn ();
1689 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1690 GEN_INT (nregs));
1691 if (pat)
1692 {
1693 emit_insn (pat);
1694 return;
1695 }
1696 else
1697 delete_insns_since (last);
1698 }
1699 #endif
1700
1701 for (i = 0; i < nregs; i++)
1702 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1703 operand_subword_force (x, i, mode));
1704 }
1705
1706 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
1709
1710
1711 void
1712 move_block_from_reg (regno, x, nregs, size)
1713 int regno;
1714 rtx x;
1715 int nregs;
1716 int size;
1717 {
1718 int i;
1719 rtx pat, last;
1720 enum machine_mode mode;
1721
1722 /* If SIZE is that of a mode no bigger than a word, just use that
1723 mode's store operation. */
1724 if (size <= UNITS_PER_WORD
1725 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1726 {
1727 emit_move_insn (change_address (x, mode, NULL),
1728 gen_rtx (REG, mode, regno));
1729 return;
1730 }
1731
1732 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1733 to the left before storing to memory. Note that the previous test
1734 doesn't handle all cases (e.g. SIZE == 3). */
1735 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1736 {
1737 rtx tem = operand_subword (x, 0, 1, BLKmode);
1738 rtx shift;
1739
1740 if (tem == 0)
1741 abort ();
1742
1743 shift = expand_shift (LSHIFT_EXPR, word_mode,
1744 gen_rtx (REG, word_mode, regno),
1745 build_int_2 ((UNITS_PER_WORD - size)
1746 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1747 emit_move_insn (tem, shift);
1748 return;
1749 }
1750
1751 /* See if the machine can do this with a store multiple insn. */
1752 #ifdef HAVE_store_multiple
1753 if (HAVE_store_multiple)
1754 {
1755 last = get_last_insn ();
1756 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1757 GEN_INT (nregs));
1758 if (pat)
1759 {
1760 emit_insn (pat);
1761 return;
1762 }
1763 else
1764 delete_insns_since (last);
1765 }
1766 #endif
1767
1768 for (i = 0; i < nregs; i++)
1769 {
1770 rtx tem = operand_subword (x, i, 1, BLKmode);
1771
1772 if (tem == 0)
1773 abort ();
1774
1775 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1776 }
1777 }
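/* A worked example of the BYTES_BIG_ENDIAN case above: storing a 3-byte
   value from a register on a machine with 4-byte words first shifts the
   register left by (4 - 3) * 8 = 8 bits, so the three significant bytes
   land in the most significant (lowest-addressed) bytes of the word that
   is then stored.  */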
1778
1779 /* Emit code to move a block Y to a block X, where X is non-consecutive
1780 registers represented by a PARALLEL. */
1781
1782 void
1783 emit_group_load (x, y)
1784 rtx x, y;
1785 {
1786 rtx target_reg, source;
1787 int i;
1788
1789 if (GET_CODE (x) != PARALLEL)
1790 abort ();
1791
1792 /* Check for a NULL entry, used to indicate that the parameter goes
1793 both on the stack and in registers. */
1794 if (XEXP (XVECEXP (x, 0, 0), 0))
1795 i = 0;
1796 else
1797 i = 1;
1798
1799 for (; i < XVECLEN (x, 0); i++)
1800 {
1801 rtx element = XVECEXP (x, 0, i);
1802
1803 target_reg = XEXP (element, 0);
1804
1805 if (GET_CODE (y) == MEM)
1806 source = change_address (y, GET_MODE (target_reg),
1807 plus_constant (XEXP (y, 0),
1808 INTVAL (XEXP (element, 1))));
1809 else if (XEXP (element, 1) == const0_rtx)
1810 {
1811 if (GET_MODE (target_reg) == GET_MODE (y))
1812 source = y;
1813 /* Allow for the target_reg to be smaller than the input register
1814 to allow for AIX with 4 DF arguments after a single SI arg. The
1815 last DF argument will only load 1 word into the integer registers,
1816 but load a DF value into the float registers. */
1817 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1818 <= GET_MODE_SIZE (GET_MODE (y)))
1819 && GET_MODE (target_reg) == word_mode)
1820 /* This might be a const_double, so we can't just use SUBREG. */
1821 source = operand_subword (y, 0, 0, VOIDmode);
1822 else
1823 abort ();
1824 }
1825 else
1826 abort ();
1827
1828 emit_move_insn (target_reg, source);
1829 }
1830 }
1831
1832 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1833 registers represented by a PARALLEL. */
1834
1835 void
1836 emit_group_store (x, y)
1837 rtx x, y;
1838 {
1839 rtx source_reg, target;
1840 int i;
1841
1842 if (GET_CODE (y) != PARALLEL)
1843 abort ();
1844
1845 /* Check for a NULL entry, used to indicate that the parameter goes
1846 both on the stack and in registers. */
1847 if (XEXP (XVECEXP (y, 0, 0), 0))
1848 i = 0;
1849 else
1850 i = 1;
1851
1852 for (; i < XVECLEN (y, 0); i++)
1853 {
1854 rtx element = XVECEXP (y, 0, i);
1855
1856 source_reg = XEXP (element, 0);
1857
1858 if (GET_CODE (x) == MEM)
1859 target = change_address (x, GET_MODE (source_reg),
1860 plus_constant (XEXP (x, 0),
1861 INTVAL (XEXP (element, 1))));
1862 else if (XEXP (element, 1) == const0_rtx)
1863 {
1864 target = x;
1865 if (GET_MODE (target) != GET_MODE (source_reg))
1866 target = gen_lowpart (GET_MODE (source_reg), target);
1867 }
1868 else
1869 abort ();
1870
1871 emit_move_insn (target, source_reg);
1872 }
1873 }
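/* In both functions above, the PARALLEL's elements pair a hard register
   with a constant byte offset into the value, roughly of the form

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])

   meaning the first four bytes live in register 3 and the next four in
   register 4; a leading element whose register is null marks a value that
   is split between the stack and registers.  The register numbers and
   modes shown are only illustrative.  */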
1874
1875 /* Add a USE expression for REG to the (possibly empty) list pointed
1876 to by CALL_FUSAGE. REG must denote a hard register. */
1877
1878 void
1879 use_reg (call_fusage, reg)
1880 rtx *call_fusage, reg;
1881 {
1882 if (GET_CODE (reg) != REG
1883 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1884 abort();
1885
1886 *call_fusage
1887 = gen_rtx (EXPR_LIST, VOIDmode,
1888 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1889 }
1890
1891 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1892 starting at REGNO. All of these registers must be hard registers. */
1893
1894 void
1895 use_regs (call_fusage, regno, nregs)
1896 rtx *call_fusage;
1897 int regno;
1898 int nregs;
1899 {
1900 int i;
1901
1902 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1903 abort ();
1904
1905 for (i = 0; i < nregs; i++)
1906 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1907 }
1908
1909 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1910 PARALLEL REGS. This is for calls that pass values in multiple
1911 non-contiguous locations. The Irix 6 ABI has examples of this. */
1912
1913 void
1914 use_group_regs (call_fusage, regs)
1915 rtx *call_fusage;
1916 rtx regs;
1917 {
1918 int i;
1919
1920 /* Check for a NULL entry, used to indicate that the parameter goes
1921 both on the stack and in registers. */
1922 if (XEXP (XVECEXP (regs, 0, 0), 0))
1923 i = 0;
1924 else
1925 i = 1;
1926
1927 for (; i < XVECLEN (regs, 0); i++)
1928 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1929 }
1930 \f
1931 /* Generate several move instructions to clear LEN bytes of block TO.
1932 (A MEM rtx with BLKmode). The caller must pass TO through
1933 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
1934 we can assume. */
1935
1936 static void
1937 clear_by_pieces (to, len, align)
1938 rtx to;
1939 int len, align;
1940 {
1941 struct clear_by_pieces data;
1942 rtx to_addr = XEXP (to, 0);
1943 int max_size = MOVE_MAX + 1;
1944
1945 data.offset = 0;
1946 data.to_addr = to_addr;
1947 data.to = to;
1948 data.autinc_to
1949 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1950 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1951
1952 data.explicit_inc_to = 0;
1953 data.reverse
1954 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1955 if (data.reverse) data.offset = len;
1956 data.len = len;
1957
1958 data.to_struct = MEM_IN_STRUCT_P (to);
1959
1960 /* If copying requires more than two move insns,
1961 copy addresses to registers (to make displacements shorter)
1962 and use post-increment if available. */
1963 if (!data.autinc_to
1964 && move_by_pieces_ninsns (len, align) > 2)
1965 {
1966 #ifdef HAVE_PRE_DECREMENT
1967 if (data.reverse && ! data.autinc_to)
1968 {
1969 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1970 data.autinc_to = 1;
1971 data.explicit_inc_to = -1;
1972 }
1973 #endif
1974 #ifdef HAVE_POST_INCREMENT
1975 if (! data.reverse && ! data.autinc_to)
1976 {
1977 data.to_addr = copy_addr_to_reg (to_addr);
1978 data.autinc_to = 1;
1979 data.explicit_inc_to = 1;
1980 }
1981 #endif
1982 if (!data.autinc_to && CONSTANT_P (to_addr))
1983 data.to_addr = copy_addr_to_reg (to_addr);
1984 }
1985
1986 if (! SLOW_UNALIGNED_ACCESS
1987 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1988 align = MOVE_MAX;
1989
1990 /* First move what we can in the largest integer mode, then go to
1991 successively smaller modes. */
1992
1993 while (max_size > 1)
1994 {
1995 enum machine_mode mode = VOIDmode, tmode;
1996 enum insn_code icode;
1997
1998 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1999 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2000 if (GET_MODE_SIZE (tmode) < max_size)
2001 mode = tmode;
2002
2003 if (mode == VOIDmode)
2004 break;
2005
2006 icode = mov_optab->handlers[(int) mode].insn_code;
2007 if (icode != CODE_FOR_nothing
2008 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2009 GET_MODE_SIZE (mode)))
2010 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2011
2012 max_size = GET_MODE_SIZE (mode);
2013 }
2014
2015 /* The code above should have handled everything. */
2016 if (data.len != 0)
2017 abort ();
2018 }
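
/* Added worked example: on a target whose widest integer move is SImode
   (MOVE_MAX == 4) and with ALIGN == 4, clearing LEN == 7 bytes proceeds
   widest mode first: one SImode store of zero (7 -> 3 bytes left), then
   one HImode store (3 -> 1), then one QImode store (1 -> 0).  Each pass
   of the while loop above narrows max_size to pick the next mode.  */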
2019
2020 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2021 with move instructions for mode MODE. GENFUN is the gen_... function
2022 to make a move insn for that mode. DATA has all the other info. */
2023
2024 static void
2025 clear_by_pieces_1 (genfun, mode, data)
2026 rtx (*genfun) ();
2027 enum machine_mode mode;
2028 struct clear_by_pieces *data;
2029 {
2030 register int size = GET_MODE_SIZE (mode);
2031 register rtx to1;
2032
2033 while (data->len >= size)
2034 {
2035 if (data->reverse) data->offset -= size;
2036
2037 to1 = (data->autinc_to
2038 ? gen_rtx (MEM, mode, data->to_addr)
2039 : change_address (data->to, mode,
2040 plus_constant (data->to_addr, data->offset)));
2041 MEM_IN_STRUCT_P (to1) = data->to_struct;
2042
2043 #ifdef HAVE_PRE_DECREMENT
2044 if (data->explicit_inc_to < 0)
2045 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2046 #endif
2047
2048 emit_insn ((*genfun) (to1, const0_rtx));
2049 #ifdef HAVE_POST_INCREMENT
2050 if (data->explicit_inc_to > 0)
2051 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2052 #endif
2053
2054 if (! data->reverse) data->offset += size;
2055
2056 data->len -= size;
2057 }
2058 }
2059 \f
2060 /* Write zeros through the storage of OBJECT.
2061 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2062 the maximum alignment we can assume it has, measured in bytes. */
2063
2064 void
2065 clear_storage (object, size, align)
2066 rtx object;
2067 rtx size;
2068 int align;
2069 {
2070 if (GET_MODE (object) == BLKmode)
2071 {
2072 object = protect_from_queue (object, 1);
2073 size = protect_from_queue (size, 0);
2074
2075 if (GET_CODE (size) == CONST_INT
2076 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2077 clear_by_pieces (object, INTVAL (size), align);
2078
2079 else
2080 {
2081 /* Try the most limited insn first, because there's no point
2082 including more than one in the machine description unless
2083 the more limited one has some advantage. */
2084
2085 rtx opalign = GEN_INT (align);
2086 enum machine_mode mode;
2087
2088 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2089 mode = GET_MODE_WIDER_MODE (mode))
2090 {
2091 enum insn_code code = clrstr_optab[(int) mode];
2092
2093 if (code != CODE_FOR_nothing
2094 /* We don't need MODE to be narrower than
2095 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2096 the mode mask, as it is returned by the macro, it will
2097 definitely be less than the actual mode mask. */
2098 && ((GET_CODE (size) == CONST_INT
2099 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2100 <= GET_MODE_MASK (mode)))
2101 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2102 && (insn_operand_predicate[(int) code][0] == 0
2103 || (*insn_operand_predicate[(int) code][0]) (object,
2104 BLKmode))
2105 && (insn_operand_predicate[(int) code][2] == 0
2106 || (*insn_operand_predicate[(int) code][2]) (opalign,
2107 VOIDmode)))
2108 {
2109 rtx op1;
2110 rtx last = get_last_insn ();
2111 rtx pat;
2112
2113 op1 = convert_to_mode (mode, size, 1);
2114 if (insn_operand_predicate[(int) code][1] != 0
2115 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2116 mode))
2117 op1 = copy_to_mode_reg (mode, op1);
2118
2119 pat = GEN_FCN ((int) code) (object, op1, opalign);
2120 if (pat)
2121 {
2122 emit_insn (pat);
2123 return;
2124 }
2125 else
2126 delete_insns_since (last);
2127 }
2128 }
2129
2130
2131 #ifdef TARGET_MEM_FUNCTIONS
2132 emit_library_call (memset_libfunc, 0,
2133 VOIDmode, 3,
2134 XEXP (object, 0), Pmode,
2135 const0_rtx, TYPE_MODE (integer_type_node),
2136 convert_to_mode (TYPE_MODE (sizetype),
2137 size, TREE_UNSIGNED (sizetype)),
2138 TYPE_MODE (sizetype));
2139 #else
2140 emit_library_call (bzero_libfunc, 0,
2141 VOIDmode, 2,
2142 XEXP (object, 0), Pmode,
2143 convert_to_mode (TYPE_MODE (integer_type_node),
2144 size,
2145 TREE_UNSIGNED (integer_type_node)),
2146 TYPE_MODE (integer_type_node));
2147 #endif
2148 }
2149 }
2150 else
2151 emit_move_insn (object, const0_rtx);
2152 }
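
/* Added summary: for a BLKmode OBJECT, clear_storage tries, in order,
   (1) open-coded stores via clear_by_pieces when SIZE is a sufficiently
   small constant, (2) a clrstrM pattern from the machine description,
   narrowest usable mode first, and (3) a library call to memset (or
   bzero).  A non-BLKmode OBJECT is simply assigned const0_rtx.  */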
2153
2154 /* Generate code to copy Y into X.
2155 Both Y and X must have the same mode, except that
2156 Y can be a constant with VOIDmode.
2157 This mode cannot be BLKmode; use emit_block_move for that.
2158
2159 Return the last instruction emitted. */
2160
2161 rtx
2162 emit_move_insn (x, y)
2163 rtx x, y;
2164 {
2165 enum machine_mode mode = GET_MODE (x);
2166
2167 x = protect_from_queue (x, 1);
2168 y = protect_from_queue (y, 0);
2169
2170 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2171 abort ();
2172
2173 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2174 y = force_const_mem (mode, y);
2175
2176 /* If X or Y are memory references, verify that their addresses are valid
2177 for the machine. */
2178 if (GET_CODE (x) == MEM
2179 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2180 && ! push_operand (x, GET_MODE (x)))
2181 || (flag_force_addr
2182 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2183 x = change_address (x, VOIDmode, XEXP (x, 0));
2184
2185 if (GET_CODE (y) == MEM
2186 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2187 || (flag_force_addr
2188 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2189 y = change_address (y, VOIDmode, XEXP (y, 0));
2190
2191 if (mode == BLKmode)
2192 abort ();
2193
2194 return emit_move_insn_1 (x, y);
2195 }
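
/* Added note: emit_move_insn itself only legitimizes its operands,
   forcing constants that cannot appear in insns into memory and fixing
   invalid MEM addresses via change_address; the actual instruction
   selection, including the complex and multi-word fallbacks, happens in
   emit_move_insn_1 below.  */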
2196
2197 /* Low level part of emit_move_insn.
2198 Called just like emit_move_insn, but assumes X and Y
2199 are basically valid. */
2200
2201 rtx
2202 emit_move_insn_1 (x, y)
2203 rtx x, y;
2204 {
2205 enum machine_mode mode = GET_MODE (x);
2206 enum machine_mode submode;
2207 enum mode_class class = GET_MODE_CLASS (mode);
2208 int i;
2209
2210 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2211 return
2212 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2213
2214 /* Expand complex moves by moving real part and imag part, if possible. */
2215 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2216 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2217 * BITS_PER_UNIT),
2218 (class == MODE_COMPLEX_INT
2219 ? MODE_INT : MODE_FLOAT),
2220 0))
2221 && (mov_optab->handlers[(int) submode].insn_code
2222 != CODE_FOR_nothing))
2223 {
2224 /* Don't split destination if it is a stack push. */
2225 int stack = push_operand (x, GET_MODE (x));
2226 rtx insns;
2227
2228 /* If this is a stack push, push the highpart first, so it
2229 will be in the argument order.
2230
2231 In that case, change_address is used only to convert
2232 the mode, not to change the address. */
2233 if (stack)
2234 {
2235 /* Note that the real part always precedes the imag part in memory
2236 regardless of machine's endianness. */
2237 #ifdef STACK_GROWS_DOWNWARD
2238 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2239 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2240 gen_imagpart (submode, y)));
2241 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2242 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2243 gen_realpart (submode, y)));
2244 #else
2245 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2246 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2247 gen_realpart (submode, y)));
2248 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2249 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2250 gen_imagpart (submode, y)));
2251 #endif
2252 }
2253 else
2254 {
2255 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2256 (gen_realpart (submode, x), gen_realpart (submode, y)));
2257 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2258 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2259 }
2260
2261 return get_last_insn ();
2262 }
2263
2264 /* This will handle any multi-word mode that lacks a move_insn pattern.
2265 However, you will get better code if you define such patterns,
2266 even if they must turn into multiple assembler instructions. */
2267 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2268 {
2269 rtx last_insn = 0;
2270 rtx insns;
2271
2272 #ifdef PUSH_ROUNDING
2273
2274 /* If X is a push on the stack, do the push now and replace
2275 X with a reference to the stack pointer. */
2276 if (push_operand (x, GET_MODE (x)))
2277 {
2278 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2279 x = change_address (x, VOIDmode, stack_pointer_rtx);
2280 }
2281 #endif
2282
2283 /* Show the output dies here. */
2284 if (x != y)
2285 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2286
2287 for (i = 0;
2288 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2289 i++)
2290 {
2291 rtx xpart = operand_subword (x, i, 1, mode);
2292 rtx ypart = operand_subword (y, i, 1, mode);
2293
2294 /* If we can't get a part of Y, put Y into memory if it is a
2295 constant. Otherwise, force it into a register. If we still
2296 can't get a part of Y, abort. */
2297 if (ypart == 0 && CONSTANT_P (y))
2298 {
2299 y = force_const_mem (mode, y);
2300 ypart = operand_subword (y, i, 1, mode);
2301 }
2302 else if (ypart == 0)
2303 ypart = operand_subword_force (y, i, mode);
2304
2305 if (xpart == 0 || ypart == 0)
2306 abort ();
2307
2308 last_insn = emit_move_insn (xpart, ypart);
2309 }
2310
2311 return last_insn;
2312 }
2313 else
2314 abort ();
2315 }
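
/* Added illustrative note: when no mov pattern exists for MODE, a
   complex value such as a DCmode (complex double) move is split above
   into two DFmode moves of the real and imaginary parts, and any other
   multi-word value is moved one word at a time with operand_subword;
   e.g. a DImode move on a 32-bit target becomes two SImode moves.  */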
2316 \f
2317 /* Pushing data onto the stack. */
2318
2319 /* Push a block of length SIZE (perhaps variable)
2320 and return an rtx to address the beginning of the block.
2321 Note that it is not possible for the value returned to be a QUEUED.
2322 The value may be virtual_outgoing_args_rtx.
2323
2324 EXTRA is the number of bytes of padding to push in addition to SIZE.
2325 BELOW nonzero means this padding comes at low addresses;
2326 otherwise, the padding comes at high addresses. */
2327
2328 rtx
2329 push_block (size, extra, below)
2330 rtx size;
2331 int extra, below;
2332 {
2333 register rtx temp;
2334
2335 size = convert_modes (Pmode, ptr_mode, size, 1);
2336 if (CONSTANT_P (size))
2337 anti_adjust_stack (plus_constant (size, extra));
2338 else if (GET_CODE (size) == REG && extra == 0)
2339 anti_adjust_stack (size);
2340 else
2341 {
2342 rtx temp = copy_to_mode_reg (Pmode, size);
2343 if (extra != 0)
2344 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2345 temp, 0, OPTAB_LIB_WIDEN);
2346 anti_adjust_stack (temp);
2347 }
2348
2349 #ifdef STACK_GROWS_DOWNWARD
2350 temp = virtual_outgoing_args_rtx;
2351 if (extra != 0 && below)
2352 temp = plus_constant (temp, extra);
2353 #else
2354 if (GET_CODE (size) == CONST_INT)
2355 temp = plus_constant (virtual_outgoing_args_rtx,
2356 - INTVAL (size) - (below ? 0 : extra));
2357 else if (extra != 0 && !below)
2358 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2359 negate_rtx (Pmode, plus_constant (size, extra)));
2360 else
2361 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2362 negate_rtx (Pmode, size));
2363 #endif
2364
2365 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2366 }
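
/* Added example: push_block (GEN_INT (32), 0, 0) adjusts the stack
   pointer to make room for 32 bytes and returns an address for the
   beginning of that block; on a STACK_GROWS_DOWNWARD target this is
   simply virtual_outgoing_args_rtx, otherwise it is that address
   offset downward by SIZE.  */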
2367
2368 rtx
2369 gen_push_operand ()
2370 {
2371 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2372 }
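
/* Added note: the rtx built by gen_push_operand, e.g. (pre_dec (reg sp))
   when STACK_PUSH_CODE is PRE_DEC, is wrapped in a MEM by callers such
   as emit_push_insn, so that storing into that MEM performs the push.  */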
2373
2374 /* Generate code to push X onto the stack, assuming it has mode MODE and
2375 type TYPE.
2376 MODE is redundant except when X is a CONST_INT (since they don't
2377 carry mode info).
2378 SIZE is an rtx for the size of data to be copied (in bytes),
2379 needed only if X is BLKmode.
2380
2381 ALIGN (in bytes) is the maximum alignment we can assume.
2382
2383 If PARTIAL and REG are both nonzero, then copy that many of the first
2384 words of X into registers starting with REG, and push the rest of X.
2385 The amount of space pushed is decreased by PARTIAL words,
2386 rounded *down* to a multiple of PARM_BOUNDARY.
2387 REG must be a hard register in this case.
2388 If REG is zero but PARTIAL is not, take all other actions for an
2389 argument partially in registers, but do not actually load any
2390 registers.
2391
2392 EXTRA is the amount in bytes of extra space to leave next to this arg.
2393 This is ignored if an argument block has already been allocated.
2394
2395 On a machine that lacks real push insns, ARGS_ADDR is the address of
2396 the bottom of the argument block for this call. We use indexing off there
2397 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2398 argument block has not been preallocated.
2399
2400 ARGS_SO_FAR is the size of args previously pushed for this call. */
2401
2402 void
2403 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2404 args_addr, args_so_far)
2405 register rtx x;
2406 enum machine_mode mode;
2407 tree type;
2408 rtx size;
2409 int align;
2410 int partial;
2411 rtx reg;
2412 int extra;
2413 rtx args_addr;
2414 rtx args_so_far;
2415 {
2416 rtx xinner;
2417 enum direction stack_direction
2418 #ifdef STACK_GROWS_DOWNWARD
2419 = downward;
2420 #else
2421 = upward;
2422 #endif
2423
2424 /* Decide where to pad the argument: `downward' for below,
2425 `upward' for above, or `none' for don't pad it.
2426 Default is below for small data on big-endian machines; else above. */
2427 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2428
2429 /* Invert direction if stack is post-update. */
2430 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2431 if (where_pad != none)
2432 where_pad = (where_pad == downward ? upward : downward);
2433
2434 xinner = x = protect_from_queue (x, 0);
2435
2436 if (mode == BLKmode)
2437 {
2438 /* Copy a block into the stack, entirely or partially. */
2439
2440 register rtx temp;
2441 int used = partial * UNITS_PER_WORD;
2442 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2443 int skip;
2444
2445 if (size == 0)
2446 abort ();
2447
2448 used -= offset;
2449
2450 /* USED is now the # of bytes we need not copy to the stack
2451 because registers will take care of them. */
2452
2453 if (partial != 0)
2454 xinner = change_address (xinner, BLKmode,
2455 plus_constant (XEXP (xinner, 0), used));
2456
2457 /* If the partial register-part of the arg counts in its stack size,
2458 skip the part of stack space corresponding to the registers.
2459 Otherwise, start copying to the beginning of the stack space,
2460 by setting SKIP to 0. */
2461 #ifndef REG_PARM_STACK_SPACE
2462 skip = 0;
2463 #else
2464 skip = used;
2465 #endif
2466
2467 #ifdef PUSH_ROUNDING
2468 /* Do it with several push insns if that doesn't take lots of insns
2469 and if there is no difficulty with push insns that skip bytes
2470 on the stack for alignment purposes. */
2471 if (args_addr == 0
2472 && GET_CODE (size) == CONST_INT
2473 && skip == 0
2474 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2475 < MOVE_RATIO)
2476 /* Here we avoid the case of a structure whose weak alignment
2477 forces many pushes of a small amount of data,
2478 and such small pushes do rounding that causes trouble. */
2479 && ((! SLOW_UNALIGNED_ACCESS)
2480 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2481 || PUSH_ROUNDING (align) == align)
2482 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2483 {
2484 /* Push padding now if padding above and stack grows down,
2485 or if padding below and stack grows up.
2486 But if space already allocated, this has already been done. */
2487 if (extra && args_addr == 0
2488 && where_pad != none && where_pad != stack_direction)
2489 anti_adjust_stack (GEN_INT (extra));
2490
2491 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2492 INTVAL (size) - used, align);
2493 }
2494 else
2495 #endif /* PUSH_ROUNDING */
2496 {
2497 /* Otherwise make space on the stack and copy the data
2498 to the address of that space. */
2499
2500 /* Deduct words put into registers from the size we must copy. */
2501 if (partial != 0)
2502 {
2503 if (GET_CODE (size) == CONST_INT)
2504 size = GEN_INT (INTVAL (size) - used);
2505 else
2506 size = expand_binop (GET_MODE (size), sub_optab, size,
2507 GEN_INT (used), NULL_RTX, 0,
2508 OPTAB_LIB_WIDEN);
2509 }
2510
2511 /* Get the address of the stack space.
2512 In this case, we do not deal with EXTRA separately.
2513 A single stack adjust will do. */
2514 if (! args_addr)
2515 {
2516 temp = push_block (size, extra, where_pad == downward);
2517 extra = 0;
2518 }
2519 else if (GET_CODE (args_so_far) == CONST_INT)
2520 temp = memory_address (BLKmode,
2521 plus_constant (args_addr,
2522 skip + INTVAL (args_so_far)));
2523 else
2524 temp = memory_address (BLKmode,
2525 plus_constant (gen_rtx (PLUS, Pmode,
2526 args_addr, args_so_far),
2527 skip));
2528
2529 /* TEMP is the address of the block. Copy the data there. */
2530 if (GET_CODE (size) == CONST_INT
2531 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2532 < MOVE_RATIO))
2533 {
2534 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2535 INTVAL (size), align);
2536 goto ret;
2537 }
2538 /* Try the most limited insn first, because there's no point
2539 including more than one in the machine description unless
2540 the more limited one has some advantage. */
2541 #ifdef HAVE_movstrqi
2542 if (HAVE_movstrqi
2543 && GET_CODE (size) == CONST_INT
2544 && ((unsigned) INTVAL (size)
2545 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2546 {
2547 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2548 xinner, size, GEN_INT (align));
2549 if (pat != 0)
2550 {
2551 emit_insn (pat);
2552 goto ret;
2553 }
2554 }
2555 #endif
2556 #ifdef HAVE_movstrhi
2557 if (HAVE_movstrhi
2558 && GET_CODE (size) == CONST_INT
2559 && ((unsigned) INTVAL (size)
2560 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2561 {
2562 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2563 xinner, size, GEN_INT (align));
2564 if (pat != 0)
2565 {
2566 emit_insn (pat);
2567 goto ret;
2568 }
2569 }
2570 #endif
2571 #ifdef HAVE_movstrsi
2572 if (HAVE_movstrsi)
2573 {
2574 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2575 xinner, size, GEN_INT (align));
2576 if (pat != 0)
2577 {
2578 emit_insn (pat);
2579 goto ret;
2580 }
2581 }
2582 #endif
2583 #ifdef HAVE_movstrdi
2584 if (HAVE_movstrdi)
2585 {
2586 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2587 xinner, size, GEN_INT (align));
2588 if (pat != 0)
2589 {
2590 emit_insn (pat);
2591 goto ret;
2592 }
2593 }
2594 #endif
2595
2596 #ifndef ACCUMULATE_OUTGOING_ARGS
2597 /* If the source is referenced relative to the stack pointer,
2598 copy it to another register to stabilize it. We do not need
2599 to do this if we know that we won't be changing sp. */
2600
2601 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2602 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2603 temp = copy_to_reg (temp);
2604 #endif
2605
2606 /* Make inhibit_defer_pop nonzero around the library call
2607 to force it to pop the bcopy-arguments right away. */
2608 NO_DEFER_POP;
2609 #ifdef TARGET_MEM_FUNCTIONS
2610 emit_library_call (memcpy_libfunc, 0,
2611 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2612 convert_to_mode (TYPE_MODE (sizetype),
2613 size, TREE_UNSIGNED (sizetype)),
2614 TYPE_MODE (sizetype));
2615 #else
2616 emit_library_call (bcopy_libfunc, 0,
2617 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2618 convert_to_mode (TYPE_MODE (integer_type_node),
2619 size,
2620 TREE_UNSIGNED (integer_type_node)),
2621 TYPE_MODE (integer_type_node));
2622 #endif
2623 OK_DEFER_POP;
2624 }
2625 }
2626 else if (partial > 0)
2627 {
2628 /* Scalar partly in registers. */
2629
2630 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2631 int i;
2632 int not_stack;
2633 /* # words of start of argument
2634 that we must make space for but need not store. */
2635 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2636 int args_offset = INTVAL (args_so_far);
2637 int skip;
2638
2639 /* Push padding now if padding above and stack grows down,
2640 or if padding below and stack grows up.
2641 But if space already allocated, this has already been done. */
2642 if (extra && args_addr == 0
2643 && where_pad != none && where_pad != stack_direction)
2644 anti_adjust_stack (GEN_INT (extra));
2645
2646 /* If we make space by pushing it, we might as well push
2647 the real data. Otherwise, we can leave OFFSET nonzero
2648 and leave the space uninitialized. */
2649 if (args_addr == 0)
2650 offset = 0;
2651
2652 /* Now NOT_STACK gets the number of words that we don't need to
2653 allocate on the stack. */
2654 not_stack = partial - offset;
2655
2656 /* If the partial register-part of the arg counts in its stack size,
2657 skip the part of stack space corresponding to the registers.
2658 Otherwise, start copying to the beginning of the stack space,
2659 by setting SKIP to 0. */
2660 #ifndef REG_PARM_STACK_SPACE
2661 skip = 0;
2662 #else
2663 skip = not_stack;
2664 #endif
2665
2666 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2667 x = validize_mem (force_const_mem (mode, x));
2668
2669 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2670 SUBREGs of such registers are not allowed. */
2671 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2672 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2673 x = copy_to_reg (x);
2674
2675 /* Loop over all the words allocated on the stack for this arg. */
2676 /* We can do it by words, because any scalar bigger than a word
2677 has a size a multiple of a word. */
2678 #ifndef PUSH_ARGS_REVERSED
2679 for (i = not_stack; i < size; i++)
2680 #else
2681 for (i = size - 1; i >= not_stack; i--)
2682 #endif
2683 if (i >= not_stack + offset)
2684 emit_push_insn (operand_subword_force (x, i, mode),
2685 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2686 0, args_addr,
2687 GEN_INT (args_offset + ((i - not_stack + skip)
2688 * UNITS_PER_WORD)));
2689 }
2690 else
2691 {
2692 rtx addr;
2693
2694 /* Push padding now if padding above and stack grows down,
2695 or if padding below and stack grows up.
2696 But if space already allocated, this has already been done. */
2697 if (extra && args_addr == 0
2698 && where_pad != none && where_pad != stack_direction)
2699 anti_adjust_stack (GEN_INT (extra));
2700
2701 #ifdef PUSH_ROUNDING
2702 if (args_addr == 0)
2703 addr = gen_push_operand ();
2704 else
2705 #endif
2706 if (GET_CODE (args_so_far) == CONST_INT)
2707 addr
2708 = memory_address (mode,
2709 plus_constant (args_addr, INTVAL (args_so_far)));
2710 else
2711 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2712 args_so_far));
2713
2714 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2715 }
2716
2717 ret:
2718 /* If part should go in registers, copy that part
2719 into the appropriate registers. Do this now, at the end,
2720 since mem-to-mem copies above may do function calls. */
2721 if (partial > 0 && reg != 0)
2722 {
2723 /* Handle calls that pass values in multiple non-contiguous locations.
2724 The Irix 6 ABI has examples of this. */
2725 if (GET_CODE (reg) == PARALLEL)
2726 emit_group_load (reg, x);
2727 else
2728 move_block_to_reg (REGNO (reg), x, partial, mode);
2729 }
2730
2731 if (extra && args_addr == 0 && where_pad == stack_direction)
2732 anti_adjust_stack (GEN_INT (extra));
2733 }
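
/* Added summary: emit_push_insn handles three cases.  A BLKmode argument
   is either pushed piecemeal with move_by_pieces or copied into stack
   space obtained from push_block, falling back to movstr patterns or a
   memcpy/bcopy library call.  A scalar partly passed in registers has
   only its stack portion pushed, word by word.  An ordinary scalar is
   stored with a single move, either through a push operand or into the
   preallocated argument block.  Any register part is loaded last, at the
   "ret" label, because the memory copies above may make function calls.  */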
2734 \f
2735 /* Expand an assignment that stores the value of FROM into TO.
2736 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2737 (This may contain a QUEUED rtx;
2738 if the value is constant, this rtx is a constant.)
2739 Otherwise, the returned value is NULL_RTX.
2740
2741 SUGGEST_REG is no longer actually used.
2742 It used to mean, copy the value through a register
2743 and return that register, if that is possible.
2744 We now use WANT_VALUE to decide whether to do this. */
2745
2746 rtx
2747 expand_assignment (to, from, want_value, suggest_reg)
2748 tree to, from;
2749 int want_value;
2750 int suggest_reg;
2751 {
2752 register rtx to_rtx = 0;
2753 rtx result;
2754
2755 /* Don't crash if the lhs of the assignment was erroneous. */
2756
2757 if (TREE_CODE (to) == ERROR_MARK)
2758 {
2759 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2760 return want_value ? result : NULL_RTX;
2761 }
2762
2763 if (output_bytecode)
2764 {
2765 tree dest_innermost;
2766
2767 bc_expand_expr (from);
2768 bc_emit_instruction (duplicate);
2769
2770 dest_innermost = bc_expand_address (to);
2771
2772 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2773 take care of it here. */
2774
2775 bc_store_memory (TREE_TYPE (to), dest_innermost);
2776 return NULL;
2777 }
2778
2779 /* Assignment of a structure component needs special treatment
2780 if the structure component's rtx is not simply a MEM.
2781 Assignment of an array element at a constant index, and assignment of
2782 an array element in an unaligned packed structure field, has the same
2783 problem. */
2784
2785 if (TREE_CODE (to) == COMPONENT_REF
2786 || TREE_CODE (to) == BIT_FIELD_REF
2787 || (TREE_CODE (to) == ARRAY_REF
2788 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2789 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2790 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2791 {
2792 enum machine_mode mode1;
2793 int bitsize;
2794 int bitpos;
2795 tree offset;
2796 int unsignedp;
2797 int volatilep = 0;
2798 tree tem;
2799 int alignment;
2800
2801 push_temp_slots ();
2802 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2803 &unsignedp, &volatilep, &alignment);
2804
2805 /* If we are going to use store_bit_field and extract_bit_field,
2806 make sure to_rtx will be safe for multiple use. */
2807
2808 if (mode1 == VOIDmode && want_value)
2809 tem = stabilize_reference (tem);
2810
2811 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2812 if (offset != 0)
2813 {
2814 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2815
2816 if (GET_CODE (to_rtx) != MEM)
2817 abort ();
2818 to_rtx = change_address (to_rtx, VOIDmode,
2819 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2820 force_reg (ptr_mode, offset_rtx)));
2821 }
2822 if (volatilep)
2823 {
2824 if (GET_CODE (to_rtx) == MEM)
2825 {
2826 /* When the offset is zero, to_rtx is the address of the
2827 structure we are storing into, and hence may be shared.
2828 We must make a new MEM before setting the volatile bit. */
2829 if (offset == 0)
2830 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2831 MEM_VOLATILE_P (to_rtx) = 1;
2832 }
2833 #if 0 /* This was turned off because, when a field is volatile
2834 in an object which is not volatile, the object may be in a register,
2835 and then we would abort over here. */
2836 else
2837 abort ();
2838 #endif
2839 }
2840
2841 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2842 (want_value
2843 /* Spurious cast makes HPUX compiler happy. */
2844 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2845 : VOIDmode),
2846 unsignedp,
2847 /* Required alignment of containing datum. */
2848 alignment,
2849 int_size_in_bytes (TREE_TYPE (tem)));
2850 preserve_temp_slots (result);
2851 free_temp_slots ();
2852 pop_temp_slots ();
2853
2854 /* If the value is meaningful, convert RESULT to the proper mode.
2855 Otherwise, return nothing. */
2856 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2857 TYPE_MODE (TREE_TYPE (from)),
2858 result,
2859 TREE_UNSIGNED (TREE_TYPE (to)))
2860 : NULL_RTX);
2861 }
2862
2863 /* If the rhs is a function call and its value is not an aggregate,
2864 call the function before we start to compute the lhs.
2865 This is needed for correct code for cases such as
2866 val = setjmp (buf) on machines where reference to val
2867 requires loading up part of an address in a separate insn.
2868
2869 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2870 a promoted variable where the zero- or sign-extension needs to be done.
2871 Handling this in the normal way is safe because no computation is done
2872 before the call. */
2873 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2874 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2875 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2876 {
2877 rtx value;
2878
2879 push_temp_slots ();
2880 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2881 if (to_rtx == 0)
2882 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2883
2884 /* Handle calls that return values in multiple non-contiguous locations.
2885 The Irix 6 ABI has examples of this. */
2886 if (GET_CODE (to_rtx) == PARALLEL)
2887 emit_group_load (to_rtx, value);
2888 else if (GET_MODE (to_rtx) == BLKmode)
2889 emit_block_move (to_rtx, value, expr_size (from),
2890 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2891 else
2892 emit_move_insn (to_rtx, value);
2893 preserve_temp_slots (to_rtx);
2894 free_temp_slots ();
2895 pop_temp_slots ();
2896 return want_value ? to_rtx : NULL_RTX;
2897 }
2898
2899 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2900 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2901
2902 if (to_rtx == 0)
2903 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2904
2905 /* Don't move directly into a return register. */
2906 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2907 {
2908 rtx temp;
2909
2910 push_temp_slots ();
2911 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2912 emit_move_insn (to_rtx, temp);
2913 preserve_temp_slots (to_rtx);
2914 free_temp_slots ();
2915 pop_temp_slots ();
2916 return want_value ? to_rtx : NULL_RTX;
2917 }
2918
2919 /* In case we are returning the contents of an object which overlaps
2920 the place the value is being stored, use a safe function when copying
2921 a value through a pointer into a structure value return block. */
2922 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2923 && current_function_returns_struct
2924 && !current_function_returns_pcc_struct)
2925 {
2926 rtx from_rtx, size;
2927
2928 push_temp_slots ();
2929 size = expr_size (from);
2930 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2931
2932 #ifdef TARGET_MEM_FUNCTIONS
2933 emit_library_call (memcpy_libfunc, 0,
2934 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2935 XEXP (from_rtx, 0), Pmode,
2936 convert_to_mode (TYPE_MODE (sizetype),
2937 size, TREE_UNSIGNED (sizetype)),
2938 TYPE_MODE (sizetype));
2939 #else
2940 emit_library_call (bcopy_libfunc, 0,
2941 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2942 XEXP (to_rtx, 0), Pmode,
2943 convert_to_mode (TYPE_MODE (integer_type_node),
2944 size, TREE_UNSIGNED (integer_type_node)),
2945 TYPE_MODE (integer_type_node));
2946 #endif
2947
2948 preserve_temp_slots (to_rtx);
2949 free_temp_slots ();
2950 pop_temp_slots ();
2951 return want_value ? to_rtx : NULL_RTX;
2952 }
2953
2954 /* Compute FROM and store the value in the rtx we got. */
2955
2956 push_temp_slots ();
2957 result = store_expr (from, to_rtx, want_value);
2958 preserve_temp_slots (result);
2959 free_temp_slots ();
2960 pop_temp_slots ();
2961 return want_value ? result : NULL_RTX;
2962 }
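
/* Added illustrative note: an assignment such as `s.f = x', where f is a
   bit-field or an element of a packed structure, takes the first branch
   above: get_inner_reference locates the containing object, bit position
   and size, and store_field emits the (possibly masked) store.  A plain
   `v = x' falls through to store_expr on the expanded rtx of v.  */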
2963
2964 /* Generate code for computing expression EXP,
2965 and storing the value into TARGET.
2966 TARGET may contain a QUEUED rtx.
2967
2968 If WANT_VALUE is nonzero, return a copy of the value
2969 not in TARGET, so that we can be sure to use the proper
2970 value in a containing expression even if TARGET has something
2971 else stored in it. If possible, we copy the value through a pseudo
2972 and return that pseudo. Or, if the value is constant, we try to
2973 return the constant. In some cases, we return a pseudo
2974 copied *from* TARGET.
2975
2976 If the mode is BLKmode then we may return TARGET itself.
2977 It turns out that in BLKmode it doesn't cause a problem,
2978 because C has no operators that could combine two different
2979 assignments into the same BLKmode object with different values
2980 with no sequence point. Will other languages need this to
2981 be more thorough?
2982
2983 If WANT_VALUE is 0, we return NULL, to make sure
2984 to catch quickly any cases where the caller uses the value
2985 and fails to set WANT_VALUE. */
2986
2987 rtx
2988 store_expr (exp, target, want_value)
2989 register tree exp;
2990 register rtx target;
2991 int want_value;
2992 {
2993 register rtx temp;
2994 int dont_return_target = 0;
2995
2996 if (TREE_CODE (exp) == COMPOUND_EXPR)
2997 {
2998 /* Perform first part of compound expression, then assign from second
2999 part. */
3000 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3001 emit_queue ();
3002 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3003 }
3004 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3005 {
3006 /* For conditional expression, get safe form of the target. Then
3007 test the condition, doing the appropriate assignment on either
3008 side. This avoids the creation of unnecessary temporaries.
3009 For non-BLKmode, it is more efficient not to do this. */
3010
3011 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3012 rtx flag = NULL_RTX;
3013 tree left_cleanups = NULL_TREE;
3014 tree right_cleanups = NULL_TREE;
3015 tree old_cleanups = cleanups_this_call;
3016
3017 /* Used to save a pointer to the place to put the setting of
3018 the flag that indicates if this side of the conditional was
3019 taken. We backpatch the code, if we find out later that we
3020 have any conditional cleanups that need to be performed. */
3021 rtx dest_right_flag = NULL_RTX;
3022 rtx dest_left_flag = NULL_RTX;
3023
3024 emit_queue ();
3025 target = protect_from_queue (target, 1);
3026
3027 do_pending_stack_adjust ();
3028 NO_DEFER_POP;
3029 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3030 store_expr (TREE_OPERAND (exp, 1), target, 0);
3031 dest_left_flag = get_last_insn ();
3032 /* Handle conditional cleanups, if any. */
3033 left_cleanups = defer_cleanups_to (old_cleanups);
3034 emit_queue ();
3035 emit_jump_insn (gen_jump (lab2));
3036 emit_barrier ();
3037 emit_label (lab1);
3038 store_expr (TREE_OPERAND (exp, 2), target, 0);
3039 dest_right_flag = get_last_insn ();
3040 /* Handle conditional cleanups, if any. */
3041 right_cleanups = defer_cleanups_to (old_cleanups);
3042 emit_queue ();
3043 emit_label (lab2);
3044 OK_DEFER_POP;
3045
3046 /* Add back in any conditional cleanups. */
3047 if (left_cleanups || right_cleanups)
3048 {
3049 tree new_cleanups;
3050 tree cond;
3051 rtx last;
3052
3053 /* Now that we know that a flag is needed, go back and add in the
3054 setting of the flag. */
3055
3056 flag = gen_reg_rtx (word_mode);
3057
3058 /* Do the left side flag. */
3059 last = get_last_insn ();
3060 /* Flag left cleanups as needed. */
3061 emit_move_insn (flag, const1_rtx);
3062 /* ??? deprecated, use sequences instead. */
3063 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3064
3065 /* Do the right side flag. */
3066 last = get_last_insn ();
3067 /* Flag right cleanups as needed. */
3068 emit_move_insn (flag, const0_rtx);
3069 /* ??? deprecated, use sequences instead. */
3070 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3071
3072 /* All cleanups must be on the function_obstack. */
3073 push_obstacks_nochange ();
3074 resume_temporary_allocation ();
3075
3076 /* convert flag, which is an rtx, into a tree. */
3077 cond = make_node (RTL_EXPR);
3078 TREE_TYPE (cond) = integer_type_node;
3079 RTL_EXPR_RTL (cond) = flag;
3080 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3081 cond = save_expr (cond);
3082
3083 if (! left_cleanups)
3084 left_cleanups = integer_zero_node;
3085 if (! right_cleanups)
3086 right_cleanups = integer_zero_node;
3087 new_cleanups = build (COND_EXPR, void_type_node,
3088 truthvalue_conversion (cond),
3089 left_cleanups, right_cleanups);
3090 new_cleanups = fold (new_cleanups);
3091
3092 pop_obstacks ();
3093
3094 /* Now add in the conditionalized cleanups. */
3095 cleanups_this_call
3096 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3097 expand_eh_region_start ();
3098 }
3099 return want_value ? target : NULL_RTX;
3100 }
3101 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3102 && GET_MODE (target) != BLKmode)
3103 /* If target is in memory and caller wants value in a register instead,
3104 arrange that. Pass TARGET as target for expand_expr so that,
3105 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3106 We know expand_expr will not use the target in that case.
3107 Don't do this if TARGET is volatile because we are supposed
3108 to write it and then read it. */
3109 {
3110 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3111 GET_MODE (target), 0);
3112 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3113 temp = copy_to_reg (temp);
3114 dont_return_target = 1;
3115 }
3116 else if (queued_subexp_p (target))
3117 /* If target contains a postincrement, let's not risk
3118 using it as the place to generate the rhs. */
3119 {
3120 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3121 {
3122 /* Expand EXP into a new pseudo. */
3123 temp = gen_reg_rtx (GET_MODE (target));
3124 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3125 }
3126 else
3127 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3128
3129 /* If target is volatile, ANSI requires accessing the value
3130 *from* the target, if it is accessed. So make that happen.
3131 In no case return the target itself. */
3132 if (! MEM_VOLATILE_P (target) && want_value)
3133 dont_return_target = 1;
3134 }
3135 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3136 /* If this is a scalar in a register that is stored in a wider mode
3137 than the declared mode, compute the result into its declared mode
3138 and then convert to the wider mode. Our value is the computed
3139 expression. */
3140 {
3141 /* If we don't want a value, we can do the conversion inside EXP,
3142 which will often result in some optimizations. Do the conversion
3143 in two steps: first change the signedness, if needed, then
3144 the extend. But don't do this if the type of EXP is a subtype
3145 of something else since then the conversion might involve
3146 more than just converting modes. */
3147 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3148 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3149 {
3150 if (TREE_UNSIGNED (TREE_TYPE (exp))
3151 != SUBREG_PROMOTED_UNSIGNED_P (target))
3152 exp
3153 = convert
3154 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3155 TREE_TYPE (exp)),
3156 exp);
3157
3158 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3159 SUBREG_PROMOTED_UNSIGNED_P (target)),
3160 exp);
3161 }
3162
3163 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3164
3165 /* If TEMP is a volatile MEM and we want a result value, make
3166 the access now so it gets done only once. Likewise if
3167 it contains TARGET. */
3168 if (GET_CODE (temp) == MEM && want_value
3169 && (MEM_VOLATILE_P (temp)
3170 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3171 temp = copy_to_reg (temp);
3172
3173 /* If TEMP is a VOIDmode constant, use convert_modes to make
3174 sure that we properly convert it. */
3175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3176 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3177 TYPE_MODE (TREE_TYPE (exp)), temp,
3178 SUBREG_PROMOTED_UNSIGNED_P (target));
3179
3180 convert_move (SUBREG_REG (target), temp,
3181 SUBREG_PROMOTED_UNSIGNED_P (target));
3182 return want_value ? temp : NULL_RTX;
3183 }
3184 else
3185 {
3186 temp = expand_expr (exp, target, GET_MODE (target), 0);
3187 /* Return TARGET if it's a specified hardware register.
3188 If TARGET is a volatile mem ref, either return TARGET
3189 or return a reg copied *from* TARGET; ANSI requires this.
3190
3191 Otherwise, if TEMP is not TARGET, return TEMP
3192 if it is constant (for efficiency),
3193 or if we really want the correct value. */
3194 if (!(target && GET_CODE (target) == REG
3195 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3196 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3197 && temp != target
3198 && (CONSTANT_P (temp) || want_value))
3199 dont_return_target = 1;
3200 }
3201
3202 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3203 the same as that of TARGET, adjust the constant. This is needed, for
3204 example, in case it is a CONST_DOUBLE and we want only a word-sized
3205 value. */
3206 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3207 && TREE_CODE (exp) != ERROR_MARK
3208 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3209 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3210 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3211
3212 /* If value was not generated in the target, store it there.
3213 Convert the value to TARGET's type first if necessary. */
3214
3215 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3216 {
3217 target = protect_from_queue (target, 1);
3218 if (GET_MODE (temp) != GET_MODE (target)
3219 && GET_MODE (temp) != VOIDmode)
3220 {
3221 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3222 if (dont_return_target)
3223 {
3224 /* In this case, we will return TEMP,
3225 so make sure it has the proper mode.
3226 But don't forget to store the value into TARGET. */
3227 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3228 emit_move_insn (target, temp);
3229 }
3230 else
3231 convert_move (target, temp, unsignedp);
3232 }
3233
3234 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3235 {
3236 /* Handle copying a string constant into an array.
3237 The string constant may be shorter than the array.
3238 So copy just the string's actual length, and clear the rest. */
3239 rtx size;
3240 rtx addr;
3241
3242 /* Get the size of the data type of the string,
3243 which is actually the size of the target. */
3244 size = expr_size (exp);
3245 if (GET_CODE (size) == CONST_INT
3246 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3247 emit_block_move (target, temp, size,
3248 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3249 else
3250 {
3251 /* Compute the size of the data to copy from the string. */
3252 tree copy_size
3253 = size_binop (MIN_EXPR,
3254 make_tree (sizetype, size),
3255 convert (sizetype,
3256 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3257 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3258 VOIDmode, 0);
3259 rtx label = 0;
3260
3261 /* Copy that much. */
3262 emit_block_move (target, temp, copy_size_rtx,
3263 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3264
3265 /* Figure out how much is left in TARGET that we have to clear.
3266 Do all calculations in ptr_mode. */
3267
3268 addr = XEXP (target, 0);
3269 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3270
3271 if (GET_CODE (copy_size_rtx) == CONST_INT)
3272 {
3273 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3274 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3275 }
3276 else
3277 {
3278 addr = force_reg (ptr_mode, addr);
3279 addr = expand_binop (ptr_mode, add_optab, addr,
3280 copy_size_rtx, NULL_RTX, 0,
3281 OPTAB_LIB_WIDEN);
3282
3283 size = expand_binop (ptr_mode, sub_optab, size,
3284 copy_size_rtx, NULL_RTX, 0,
3285 OPTAB_LIB_WIDEN);
3286
3287 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3288 GET_MODE (size), 0, 0);
3289 label = gen_label_rtx ();
3290 emit_jump_insn (gen_blt (label));
3291 }
3292
3293 if (size != const0_rtx)
3294 {
3295 #ifdef TARGET_MEM_FUNCTIONS
3296 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3297 addr, ptr_mode,
3298 const0_rtx, TYPE_MODE (integer_type_node),
3299 convert_to_mode (TYPE_MODE (sizetype),
3300 size,
3301 TREE_UNSIGNED (sizetype)),
3302 TYPE_MODE (sizetype));
3303 #else
3304 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3305 addr, ptr_mode,
3306 convert_to_mode (TYPE_MODE (integer_type_node),
3307 size,
3308 TREE_UNSIGNED (integer_type_node)),
3309 TYPE_MODE (integer_type_node));
3310 #endif
3311 }
3312
3313 if (label)
3314 emit_label (label);
3315 }
3316 }
3317 /* Handle calls that return values in multiple non-contiguous locations.
3318 The Irix 6 ABI has examples of this. */
3319 else if (GET_CODE (target) == PARALLEL)
3320 emit_group_load (target, temp);
3321 else if (GET_MODE (temp) == BLKmode)
3322 emit_block_move (target, temp, expr_size (exp),
3323 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3324 else
3325 emit_move_insn (target, temp);
3326 }
3327
3328 /* If we don't want a value, return NULL_RTX. */
3329 if (! want_value)
3330 return NULL_RTX;
3331
3332 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3333 ??? The latter test doesn't seem to make sense. */
3334 else if (dont_return_target && GET_CODE (temp) != MEM)
3335 return temp;
3336
3337 /* Return TARGET itself if it is a hard register. */
3338 else if (want_value && GET_MODE (target) != BLKmode
3339 && ! (GET_CODE (target) == REG
3340 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3341 return copy_to_reg (target);
3342
3343 else
3344 return target;
3345 }
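
/* Added example: storing the STRING_CST "ab" into a `char buf[8]' goes
   through the BLKmode branch above; emit_block_move copies the string's
   3 bytes (the C front end counts the terminating null in
   TREE_STRING_LENGTH), and the remaining 5 bytes of the target are then
   cleared with a memset/bzero library call.  */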
3346 \f
3347 /* Return 1 if EXP just contains zeros. */
3348
3349 static int
3350 is_zeros_p (exp)
3351 tree exp;
3352 {
3353 tree elt;
3354
3355 switch (TREE_CODE (exp))
3356 {
3357 case CONVERT_EXPR:
3358 case NOP_EXPR:
3359 case NON_LVALUE_EXPR:
3360 return is_zeros_p (TREE_OPERAND (exp, 0));
3361
3362 case INTEGER_CST:
3363 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3364
3365 case COMPLEX_CST:
3366 return
3367 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3368
3369 case REAL_CST:
3370 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3371
3372 case CONSTRUCTOR:
3373 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3374 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3375 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3376 if (! is_zeros_p (TREE_VALUE (elt)))
3377 return 0;
3378
3379 return 1;
3380 }
3381
3382 return 0;
3383 }
3384
3385 /* Return 1 if EXP contains mostly (3/4) zeros. */
3386
3387 static int
3388 mostly_zeros_p (exp)
3389 tree exp;
3390 {
3391 if (TREE_CODE (exp) == CONSTRUCTOR)
3392 {
3393 int elts = 0, zeros = 0;
3394 tree elt = CONSTRUCTOR_ELTS (exp);
3395 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3396 {
3397 /* If there are no ranges of true bits, it is all zero. */
3398 return elt == NULL_TREE;
3399 }
3400 for (; elt; elt = TREE_CHAIN (elt))
3401 {
3402 /* We do not handle the case where the index is a RANGE_EXPR,
3403 so the statistic will be somewhat inaccurate.
3404 We do make a more accurate count in store_constructor itself,
3405 and since this function is only used for nested array elements,
3406 this should be close enough. */
3407 if (mostly_zeros_p (TREE_VALUE (elt)))
3408 zeros++;
3409 elts++;
3410 }
3411
3412 return 4 * zeros >= 3 * elts;
3413 }
3414
3415 return is_zeros_p (exp);
3416 }
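
/* Added example: for the constructor {0, 0, 0, 5} the loop above counts
   elts == 4 and zeros == 3, and 4 * 3 >= 3 * 4 holds, so the value is
   considered mostly zeros; with only two zero elements out of four it
   would not be.  */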
3417 \f
3418 /* Helper function for store_constructor.
3419 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3420 TYPE is the type of the CONSTRUCTOR, not the element type.
3421 CLEARED is as for store_constructor.
3422
3423 This provides a recursive shortcut back to store_constructor when it isn't
3424 necessary to go through store_field. This is so that we can pass through
3425 the cleared field to let store_constructor know that we may not have to
3426 clear a substructure if the outer structure has already been cleared. */
3427
3428 static void
3429 store_constructor_field (target, bitsize, bitpos,
3430 mode, exp, type, cleared)
3431 rtx target;
3432 int bitsize, bitpos;
3433 enum machine_mode mode;
3434 tree exp, type;
3435 int cleared;
3436 {
3437 if (TREE_CODE (exp) == CONSTRUCTOR
3438 && bitpos % BITS_PER_UNIT == 0
3439 /* If we have a non-zero bitpos for a register target, then we just
3440 let store_field do the bitfield handling. This is unlikely to
3441 generate unnecessary clear instructions anyway. */
3442 && (bitpos == 0 || GET_CODE (target) == MEM))
3443 {
3444 if (bitpos != 0)
3445 target = change_address (target, VOIDmode,
3446 plus_constant (XEXP (target, 0),
3447 bitpos / BITS_PER_UNIT));
3448 store_constructor (exp, target, cleared);
3449 }
3450 else
3451 store_field (target, bitsize, bitpos, mode, exp,
3452 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3453 int_size_in_bytes (type));
3454 }
3455
3456 /* Store the value of constructor EXP into the rtx TARGET.
3457 TARGET is either a REG or a MEM.
3458 CLEARED is true if TARGET is known to have been zero'd. */
3459
3460 static void
3461 store_constructor (exp, target, cleared)
3462 tree exp;
3463 rtx target;
3464 int cleared;
3465 {
3466 tree type = TREE_TYPE (exp);
3467
3468 /* We know our target cannot conflict, since safe_from_p has been called. */
3469 #if 0
3470 /* Don't try copying piece by piece into a hard register
3471 since that is vulnerable to being clobbered by EXP.
3472 Instead, construct in a pseudo register and then copy it all. */
3473 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3474 {
3475 rtx temp = gen_reg_rtx (GET_MODE (target));
3476 store_constructor (exp, temp, 0);
3477 emit_move_insn (target, temp);
3478 return;
3479 }
3480 #endif
3481
3482 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3483 || TREE_CODE (type) == QUAL_UNION_TYPE)
3484 {
3485 register tree elt;
3486
3487 /* Inform later passes that the whole union value is dead. */
3488 if (TREE_CODE (type) == UNION_TYPE
3489 || TREE_CODE (type) == QUAL_UNION_TYPE)
3490 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3491
3492 /* If we are building a static constructor into a register,
3493 set the initial value as zero so we can fold the value into
3494 a constant. But if more than one register is involved,
3495 this probably loses. */
3496 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3497 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3498 {
3499 if (! cleared)
3500 emit_move_insn (target, const0_rtx);
3501
3502 cleared = 1;
3503 }
3504
3505 /* If the constructor has fewer fields than the structure
3506 or if we are initializing the structure to mostly zeros,
3507 clear the whole structure first. */
3508 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3509 != list_length (TYPE_FIELDS (type)))
3510 || mostly_zeros_p (exp))
3511 {
3512 if (! cleared)
3513 clear_storage (target, expr_size (exp),
3514 TYPE_ALIGN (type) / BITS_PER_UNIT);
3515
3516 cleared = 1;
3517 }
3518 else
3519 /* Inform later passes that the old value is dead. */
3520 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3521
3522 /* Store each element of the constructor into
3523 the corresponding field of TARGET. */
3524
3525 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3526 {
3527 register tree field = TREE_PURPOSE (elt);
3528 register enum machine_mode mode;
3529 int bitsize;
3530 int bitpos = 0;
3531 int unsignedp;
3532 tree pos, constant = 0, offset = 0;
3533 rtx to_rtx = target;
3534
3535 /* Just ignore missing fields.
3536 We cleared the whole structure, above,
3537 if any fields are missing. */
3538 if (field == 0)
3539 continue;
3540
3541 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3542 continue;
3543
3544 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3545 unsignedp = TREE_UNSIGNED (field);
3546 mode = DECL_MODE (field);
3547 if (DECL_BIT_FIELD (field))
3548 mode = VOIDmode;
3549
3550 pos = DECL_FIELD_BITPOS (field);
3551 if (TREE_CODE (pos) == INTEGER_CST)
3552 constant = pos;
3553 else if (TREE_CODE (pos) == PLUS_EXPR
3554 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3555 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3556 else
3557 offset = pos;
3558
3559 if (constant)
3560 bitpos = TREE_INT_CST_LOW (constant);
3561
3562 if (offset)
3563 {
3564 rtx offset_rtx;
3565
3566 if (contains_placeholder_p (offset))
3567 offset = build (WITH_RECORD_EXPR, sizetype,
3568 offset, exp);
3569
3570 offset = size_binop (FLOOR_DIV_EXPR, offset,
3571 size_int (BITS_PER_UNIT));
3572
3573 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3574 if (GET_CODE (to_rtx) != MEM)
3575 abort ();
3576
3577 to_rtx
3578 = change_address (to_rtx, VOIDmode,
3579 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3580 force_reg (ptr_mode, offset_rtx)));
3581 }
3582 if (TREE_READONLY (field))
3583 {
3584 if (GET_CODE (to_rtx) == MEM)
3585 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3586 XEXP (to_rtx, 0));
3587 RTX_UNCHANGING_P (to_rtx) = 1;
3588 }
3589
3590 store_constructor_field (to_rtx, bitsize, bitpos,
3591 mode, TREE_VALUE (elt), type, cleared);
3592 }
3593 }
3594 else if (TREE_CODE (type) == ARRAY_TYPE)
3595 {
3596 register tree elt;
3597 register int i;
3598 int need_to_clear;
3599 tree domain = TYPE_DOMAIN (type);
3600 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3601 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3602 tree elttype = TREE_TYPE (type);
3603
3604 /* If the constructor has fewer elements than the array,
3605 clear the whole array first. Similarly if this is a
3606 static constructor of a non-BLKmode object. */
3607 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3608 need_to_clear = 1;
3609 else
3610 {
3611 HOST_WIDE_INT count = 0, zero_count = 0;
3612 need_to_clear = 0;
3613 /* This loop is a more accurate version of the loop in
3614 mostly_zeros_p (it handles RANGE_EXPR in an index).
3615 It is also needed to check for missing elements. */
3616 for (elt = CONSTRUCTOR_ELTS (exp);
3617 elt != NULL_TREE;
3618 elt = TREE_CHAIN (elt))
3619 {
3620 tree index = TREE_PURPOSE (elt);
3621 HOST_WIDE_INT this_node_count;
3622 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3623 {
3624 tree lo_index = TREE_OPERAND (index, 0);
3625 tree hi_index = TREE_OPERAND (index, 1);
3626 if (TREE_CODE (lo_index) != INTEGER_CST
3627 || TREE_CODE (hi_index) != INTEGER_CST)
3628 {
3629 need_to_clear = 1;
3630 break;
3631 }
3632 this_node_count = TREE_INT_CST_LOW (hi_index)
3633 - TREE_INT_CST_LOW (lo_index) + 1;
3634 }
3635 else
3636 this_node_count = 1;
3637 count += this_node_count;
3638 if (mostly_zeros_p (TREE_VALUE (elt)))
3639 zero_count += this_node_count;
3640 }
3641 /* Clear the entire array first if there are any missing elements,
3642 or if the incidence of zero elements is >= 75%. */
3643 if (count < maxelt - minelt + 1
3644 || 4 * zero_count >= 3 * count)
3645 need_to_clear = 1;
3646 }
3647 if (need_to_clear)
3648 {
3649 if (! cleared)
3650 clear_storage (target, expr_size (exp),
3651 TYPE_ALIGN (type) / BITS_PER_UNIT);
3652 cleared = 1;
3653 }
3654 else
3655 /* Inform later passes that the old value is dead. */
3656 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3657
3658 /* Store each element of the constructor into
3659 the corresponding element of TARGET, determined
3660 by counting the elements. */
3661 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3662 elt;
3663 elt = TREE_CHAIN (elt), i++)
3664 {
3665 register enum machine_mode mode;
3666 int bitsize;
3667 int bitpos;
3668 int unsignedp;
3669 tree value = TREE_VALUE (elt);
3670 tree index = TREE_PURPOSE (elt);
3671 rtx xtarget = target;
3672
3673 if (cleared && is_zeros_p (value))
3674 continue;
3675
3676 mode = TYPE_MODE (elttype);
3677 bitsize = GET_MODE_BITSIZE (mode);
3678 unsignedp = TREE_UNSIGNED (elttype);
3679
3680 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3681 {
3682 tree lo_index = TREE_OPERAND (index, 0);
3683 tree hi_index = TREE_OPERAND (index, 1);
3684 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3685 struct nesting *loop;
3686 HOST_WIDE_INT lo, hi, count;
3687 tree position;
3688
3689 /* If the range is constant and "small", unroll the loop. */
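/* ("Small" here, per the test below, means the target is not in
   memory, or the range has at most two elements, or the unrolled
   stores would cover at most 40 * 8 bits, i.e. about 40 bytes.)  */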
3690 if (TREE_CODE (lo_index) == INTEGER_CST
3691 && TREE_CODE (hi_index) == INTEGER_CST
3692 && (lo = TREE_INT_CST_LOW (lo_index),
3693 hi = TREE_INT_CST_LOW (hi_index),
3694 count = hi - lo + 1,
3695 (GET_CODE (target) != MEM
3696 || count <= 2
3697 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3698 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3699 <= 40 * 8))))
3700 {
3701 lo -= minelt; hi -= minelt;
3702 for (; lo <= hi; lo++)
3703 {
3704 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3705 store_constructor_field (target, bitsize, bitpos,
3706 mode, value, type, cleared);
3707 }
3708 }
3709 else
3710 {
3711 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3712 loop_top = gen_label_rtx ();
3713 loop_end = gen_label_rtx ();
3714
3715 unsignedp = TREE_UNSIGNED (domain);
3716
3717 index = build_decl (VAR_DECL, NULL_TREE, domain);
3718
3719 DECL_RTL (index) = index_r
3720 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3721 &unsignedp, 0));
3722
3723 if (TREE_CODE (value) == SAVE_EXPR
3724 && SAVE_EXPR_RTL (value) == 0)
3725 {
3726 /* Make sure value gets expanded once before the
3727 loop. */
3728 expand_expr (value, const0_rtx, VOIDmode, 0);
3729 emit_queue ();
3730 }
3731 store_expr (lo_index, index_r, 0);
3732 loop = expand_start_loop (0);
3733
3734 /* Assign value to element index. */
3735 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3736 size_int (BITS_PER_UNIT));
3737 position = size_binop (MULT_EXPR,
3738 size_binop (MINUS_EXPR, index,
3739 TYPE_MIN_VALUE (domain)),
3740 position);
3741 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3742 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3743 xtarget = change_address (target, mode, addr);
3744 if (TREE_CODE (value) == CONSTRUCTOR)
3745 store_constructor (value, xtarget, cleared);
3746 else
3747 store_expr (value, xtarget, 0);
3748
3749 expand_exit_loop_if_false (loop,
3750 build (LT_EXPR, integer_type_node,
3751 index, hi_index));
3752
3753 expand_increment (build (PREINCREMENT_EXPR,
3754 TREE_TYPE (index),
3755 index, integer_one_node), 0, 0);
3756 expand_end_loop ();
3757 emit_label (loop_end);
3758
3759 /* Needed by stupid register allocation, to extend the
3760 lifetime of pseudo-regs used by target past the end
3761 of the loop. */
3762 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3763 }
3764 }
3765 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3766 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3767 {
3768 rtx pos_rtx, addr;
3769 tree position;
3770
3771 if (index == 0)
3772 index = size_int (i);
3773
3774 if (minelt)
3775 index = size_binop (MINUS_EXPR, index,
3776 TYPE_MIN_VALUE (domain));
3777 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3778 size_int (BITS_PER_UNIT));
3779 position = size_binop (MULT_EXPR, index, position);
3780 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3781 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3782 xtarget = change_address (target, mode, addr);
3783 store_expr (value, xtarget, 0);
3784 }
3785 else
3786 {
3787 if (index != 0)
3788 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3789 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3790 else
3791 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3792 store_constructor_field (target, bitsize, bitpos,
3793 mode, value, type, cleared);
3794 }
3795 }
3796 }
3797 /* set constructor assignments */
3798 else if (TREE_CODE (type) == SET_TYPE)
3799 {
3800 tree elt = CONSTRUCTOR_ELTS (exp);
3801 rtx xtarget = XEXP (target, 0);
3802 int set_word_size = TYPE_ALIGN (type);
3803 int nbytes = int_size_in_bytes (type), nbits;
3804 tree domain = TYPE_DOMAIN (type);
3805 tree domain_min, domain_max, bitlength;
3806
3807 /* The default implementation strategy is to extract the constant
3808 parts of the constructor, use that to initialize the target,
3809 and then "or" in whatever non-constant ranges we need in addition.
3810
3811 If a large set is all zero or all ones, it is
3812 probably better to set it using memset (if available) or bzero.
3813 Also, if a large set has just a single range, it may be
3814 better to first clear the whole set (using bzero/memset)
3815 and then set the bits we want. */
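/* For illustration (hypothetical source): a constructor for a set
   with domain 0..31 containing just the range [5..7] corresponds to
   a one-word bitmap with bits 5, 6 and 7 set; the code below either
   assembles such a word from the constant bits or clears the target
   and then sets the range at run time.  */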
3816
3817 /* Check for all zeros. */
3818 if (elt == NULL_TREE)
3819 {
3820 if (!cleared)
3821 clear_storage (target, expr_size (exp),
3822 TYPE_ALIGN (type) / BITS_PER_UNIT);
3823 return;
3824 }
3825
3826 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3827 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3828 bitlength = size_binop (PLUS_EXPR,
3829 size_binop (MINUS_EXPR, domain_max, domain_min),
3830 size_one_node);
3831
3832 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3833 abort ();
3834 nbits = TREE_INT_CST_LOW (bitlength);
3835
3836 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3837 are "complicated" (more than one range), initialize (the
3838 constant parts) by copying from a constant. */
3839 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3840 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3841 {
3842 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3843 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3844 char *bit_buffer = (char *) alloca (nbits);
3845 HOST_WIDE_INT word = 0;
3846 int bit_pos = 0;
3847 int ibit = 0;
3848 int offset = 0; /* In bytes from beginning of set. */
3849 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3850 for (;;)
3851 {
3852 if (bit_buffer[ibit])
3853 {
3854 if (BYTES_BIG_ENDIAN)
3855 word |= (1 << (set_word_size - 1 - bit_pos));
3856 else
3857 word |= 1 << bit_pos;
3858 }
3859 bit_pos++; ibit++;
3860 if (bit_pos >= set_word_size || ibit == nbits)
3861 {
3862 if (word != 0 || ! cleared)
3863 {
3864 rtx datum = GEN_INT (word);
3865 rtx to_rtx;
3866 /* The assumption here is that it is safe to use
3867 XEXP if the set is multi-word, but not if
3868 it's single-word. */
3869 if (GET_CODE (target) == MEM)
3870 {
3871 to_rtx = plus_constant (XEXP (target, 0), offset);
3872 to_rtx = change_address (target, mode, to_rtx);
3873 }
3874 else if (offset == 0)
3875 to_rtx = target;
3876 else
3877 abort ();
3878 emit_move_insn (to_rtx, datum);
3879 }
3880 if (ibit == nbits)
3881 break;
3882 word = 0;
3883 bit_pos = 0;
3884 offset += set_word_size / BITS_PER_UNIT;
3885 }
3886 }
3887 }
3888 else if (!cleared)
3889 {
3890 /* Don't bother clearing storage if the set is all ones. */
3891 if (TREE_CHAIN (elt) != NULL_TREE
3892 || (TREE_PURPOSE (elt) == NULL_TREE
3893 ? nbits != 1
3894 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3895 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3896 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3897 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3898 != nbits))))
3899 clear_storage (target, expr_size (exp),
3900 TYPE_ALIGN (type) / BITS_PER_UNIT);
3901 }
3902
3903 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3904 {
3905 /* start of range of element or NULL */
3906 tree startbit = TREE_PURPOSE (elt);
3907 /* end of range of element, or element value */
3908 tree endbit = TREE_VALUE (elt);
3909 HOST_WIDE_INT startb, endb;
3910 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3911
3912 bitlength_rtx = expand_expr (bitlength,
3913 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3914
3915 /* handle non-range tuple element like [ expr ] */
3916 if (startbit == NULL_TREE)
3917 {
3918 startbit = save_expr (endbit);
3919 endbit = startbit;
3920 }
3921 startbit = convert (sizetype, startbit);
3922 endbit = convert (sizetype, endbit);
3923 if (! integer_zerop (domain_min))
3924 {
3925 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3926 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3927 }
3928 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3929 EXPAND_CONST_ADDRESS);
3930 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3931 EXPAND_CONST_ADDRESS);
3932
3933 if (REG_P (target))
3934 {
3935 targetx = assign_stack_temp (GET_MODE (target),
3936 GET_MODE_SIZE (GET_MODE (target)),
3937 0);
3938 emit_move_insn (targetx, target);
3939 }
3940 else if (GET_CODE (target) == MEM)
3941 targetx = target;
3942 else
3943 abort ();
3944
3945 #ifdef TARGET_MEM_FUNCTIONS
3946 /* Optimization: If startbit and endbit are
3947 constants divisible by BITS_PER_UNIT,
3948 call memset instead. */
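/* For example (assuming 8-bit units): startbit == 8 and endbit == 31
   give startb == 8 and endb == 32, both multiples of BITS_PER_UNIT,
   so we memset (endb - startb) / 8 == 3 bytes of all-ones starting
   at byte offset startb / 8 == 1.  */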
3949 if (TREE_CODE (startbit) == INTEGER_CST
3950 && TREE_CODE (endbit) == INTEGER_CST
3951 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3952 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3953 {
3954 emit_library_call (memset_libfunc, 0,
3955 VOIDmode, 3,
3956 plus_constant (XEXP (targetx, 0),
3957 startb / BITS_PER_UNIT),
3958 Pmode,
3959 constm1_rtx, TYPE_MODE (integer_type_node),
3960 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3961 TYPE_MODE (sizetype));
3962 }
3963 else
3964 #endif
3965 {
3966 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3967 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3968 bitlength_rtx, TYPE_MODE (sizetype),
3969 startbit_rtx, TYPE_MODE (sizetype),
3970 endbit_rtx, TYPE_MODE (sizetype));
3971 }
3972 if (REG_P (target))
3973 emit_move_insn (target, targetx);
3974 }
3975 }
3976
3977 else
3978 abort ();
3979 }
3980
3981 /* Store the value of EXP (an expression tree)
3982 into a subfield of TARGET which has mode MODE and occupies
3983 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3984 If MODE is VOIDmode, it means that we are storing into a bit-field.
3985
3986 If VALUE_MODE is VOIDmode, return nothing in particular.
3987 UNSIGNEDP is not used in this case.
3988
3989 Otherwise, return an rtx for the value stored. This rtx
3990 has mode VALUE_MODE if that is convenient to do.
3991 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3992
3993 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3994 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
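/* For illustration (hypothetical layout): storing into B in
   struct { int a : 3; int b : 5; } might reach here with
   BITSIZE == 5, BITPOS == 3 and MODE == VOIDmode, since B is a
   bit-field; storing into an ordinary aligned int member would
   instead pass that member's machine mode.  */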
3995
3996 static rtx
3997 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3998 unsignedp, align, total_size)
3999 rtx target;
4000 int bitsize, bitpos;
4001 enum machine_mode mode;
4002 tree exp;
4003 enum machine_mode value_mode;
4004 int unsignedp;
4005 int align;
4006 int total_size;
4007 {
4008 HOST_WIDE_INT width_mask = 0;
4009
4010 if (bitsize < HOST_BITS_PER_WIDE_INT)
4011 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4012
4013 /* If we are storing into an unaligned field of an aligned union that is
4014 in a register, we may have the mode of TARGET being an integer mode but
4015 MODE == BLKmode. In that case, get an aligned object whose size and
4016 alignment are the same as TARGET and store TARGET into it (we can avoid
4017 the store if the field being stored is the entire width of TARGET). Then
4018 call ourselves recursively to store the field into a BLKmode version of
4019 that object. Finally, load from the object into TARGET. This is not
4020 very efficient in general, but should only be slightly more expensive
4021 than the otherwise-required unaligned accesses. Perhaps this can be
4022 cleaned up later. */
4023
4024 if (mode == BLKmode
4025 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4026 {
4027 rtx object = assign_stack_temp (GET_MODE (target),
4028 GET_MODE_SIZE (GET_MODE (target)), 0);
4029 rtx blk_object = copy_rtx (object);
4030
4031 MEM_IN_STRUCT_P (object) = 1;
4032 MEM_IN_STRUCT_P (blk_object) = 1;
4033 PUT_MODE (blk_object, BLKmode);
4034
4035 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4036 emit_move_insn (object, target);
4037
4038 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4039 align, total_size);
4040
4041 /* Even though we aren't returning target, we need to
4042 give it the updated value. */
4043 emit_move_insn (target, object);
4044
4045 return blk_object;
4046 }
4047
4048 /* If the structure is in a register or if the component
4049 is a bit field, we cannot use addressing to access it.
4050 Use bit-field techniques or SUBREG to store in it. */
4051
4052 if (mode == VOIDmode
4053 || (mode != BLKmode && ! direct_store[(int) mode])
4054 || GET_CODE (target) == REG
4055 || GET_CODE (target) == SUBREG
4056 /* If the field isn't aligned enough to store as an ordinary memref,
4057 store it as a bit field. */
4058 || (SLOW_UNALIGNED_ACCESS
4059 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4060 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4061 {
4062 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4063
4064 /* If BITSIZE is narrower than the size of the type of EXP
4065 we will be narrowing TEMP. Normally, what's wanted are the
4066 low-order bits. However, if EXP's type is a record and this is a
4067 big-endian machine, we want the upper BITSIZE bits. */
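/* E.g. with a 32-bit TEMP and BITSIZE == 8, the shift below moves the
   value right by 32 - 8 == 24 bits, so the wanted high-order byte
   lands in the low-order bits.  */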
4068 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4069 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4070 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4071 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4072 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4073 - bitsize),
4074 temp, 1);
4075
4076 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4077 MODE. */
4078 if (mode != VOIDmode && mode != BLKmode
4079 && mode != TYPE_MODE (TREE_TYPE (exp)))
4080 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4081
4082 /* If the modes of TARGET and TEMP are both BLKmode, both
4083 must be in memory and BITPOS must be aligned on a byte
4084 boundary. If so, we simply do a block copy. */
4085 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4086 {
4087 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4088 || bitpos % BITS_PER_UNIT != 0)
4089 abort ();
4090
4091 target = change_address (target, VOIDmode,
4092 plus_constant (XEXP (target, 0),
4093 bitpos / BITS_PER_UNIT));
4094
4095 emit_block_move (target, temp,
4096 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4097 / BITS_PER_UNIT),
4098 1);
4099
4100 return value_mode == VOIDmode ? const0_rtx : target;
4101 }
4102
4103 /* Store the value in the bitfield. */
4104 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4105 if (value_mode != VOIDmode)
4106 {
4107 /* The caller wants an rtx for the value. */
4108 /* If possible, avoid refetching from the bitfield itself. */
4109 if (width_mask != 0
4110 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4111 {
4112 tree count;
4113 enum machine_mode tmode;
4114
4115 if (unsignedp)
4116 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4117 tmode = GET_MODE (temp);
4118 if (tmode == VOIDmode)
4119 tmode = value_mode;
4120 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4121 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4122 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4123 }
4124 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4125 NULL_RTX, value_mode, 0, align,
4126 total_size);
4127 }
4128 return const0_rtx;
4129 }
4130 else
4131 {
4132 rtx addr = XEXP (target, 0);
4133 rtx to_rtx;
4134
4135 /* If a value is wanted, it must be the lhs;
4136 so make the address stable for multiple use. */
4137
4138 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4139 && ! CONSTANT_ADDRESS_P (addr)
4140 /* A frame-pointer reference is already stable. */
4141 && ! (GET_CODE (addr) == PLUS
4142 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4143 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4144 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4145 addr = copy_to_reg (addr);
4146
4147 /* Now build a reference to just the desired component. */
4148
4149 to_rtx = change_address (target, mode,
4150 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4151 MEM_IN_STRUCT_P (to_rtx) = 1;
4152
4153 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4154 }
4155 }
4156 \f
4157 /* Return true if any object containing the innermost array is an unaligned
4158 packed structure field. */
4159
4160 static int
4161 get_inner_unaligned_p (exp)
4162 tree exp;
4163 {
4164 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4165
4166 while (1)
4167 {
4168 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4169 {
4170 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4171 < needed_alignment)
4172 return 1;
4173 }
4174 else if (TREE_CODE (exp) != ARRAY_REF
4175 && TREE_CODE (exp) != NON_LVALUE_EXPR
4176 && ! ((TREE_CODE (exp) == NOP_EXPR
4177 || TREE_CODE (exp) == CONVERT_EXPR)
4178 && (TYPE_MODE (TREE_TYPE (exp))
4179 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4180 break;
4181
4182 exp = TREE_OPERAND (exp, 0);
4183 }
4184
4185 return 0;
4186 }
4187
4188 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4189 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4190 ARRAY_REFs and find the ultimate containing object, which we return.
4191
4192 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4193 bit position, and *PUNSIGNEDP to the signedness of the field.
4194 If the position of the field is variable, we store a tree
4195 giving the variable offset (in units) in *POFFSET.
4196 This offset is in addition to the bit position.
4197 If the position is not variable, we store 0 in *POFFSET.
4198 We set *PALIGNMENT to the alignment in bytes of the address that will be
4199 computed. This is the alignment of the thing we return if *POFFSET
4200 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4201
4202 If any of the extraction expressions is volatile,
4203 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4204
4205 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4206 is a mode that can be used to access the field. In that case, *PBITSIZE
4207 is redundant.
4208
4209 If the field describes a variable-sized object, *PMODE is set to
4210 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4211 this case, but the address of the object can be found. */
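/* For illustration (hypothetical structure): for a COMPONENT_REF such
   as x.f, where F is a 3-bit bit-field placed 34 bits into X, this
   returns the tree for X with *PBITSIZE == 3, *PBITPOS == 34,
   *PMODE == VOIDmode and *POFFSET == 0; a reference whose position
   is variable would instead leave the variable part in *POFFSET.  */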
4212
4213 tree
4214 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4215 punsignedp, pvolatilep, palignment)
4216 tree exp;
4217 int *pbitsize;
4218 int *pbitpos;
4219 tree *poffset;
4220 enum machine_mode *pmode;
4221 int *punsignedp;
4222 int *pvolatilep;
4223 int *palignment;
4224 {
4225 tree orig_exp = exp;
4226 tree size_tree = 0;
4227 enum machine_mode mode = VOIDmode;
4228 tree offset = integer_zero_node;
4229 int alignment = BIGGEST_ALIGNMENT;
4230
4231 if (TREE_CODE (exp) == COMPONENT_REF)
4232 {
4233 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4234 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4235 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4236 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4237 }
4238 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4239 {
4240 size_tree = TREE_OPERAND (exp, 1);
4241 *punsignedp = TREE_UNSIGNED (exp);
4242 }
4243 else
4244 {
4245 mode = TYPE_MODE (TREE_TYPE (exp));
4246 *pbitsize = GET_MODE_BITSIZE (mode);
4247 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4248 }
4249
4250 if (size_tree)
4251 {
4252 if (TREE_CODE (size_tree) != INTEGER_CST)
4253 mode = BLKmode, *pbitsize = -1;
4254 else
4255 *pbitsize = TREE_INT_CST_LOW (size_tree);
4256 }
4257
4258 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4259 and find the ultimate containing object. */
4260
4261 *pbitpos = 0;
4262
4263 while (1)
4264 {
4265 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4266 {
4267 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4268 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4269 : TREE_OPERAND (exp, 2));
4270 tree constant = integer_zero_node, var = pos;
4271
4272 /* If this field hasn't been filled in yet, don't go
4273 past it. This should only happen when folding expressions
4274 made during type construction. */
4275 if (pos == 0)
4276 break;
4277
4278 /* Assume here that the offset is a multiple of a unit.
4279 If not, there should be an explicitly added constant. */
4280 if (TREE_CODE (pos) == PLUS_EXPR
4281 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4282 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4283 else if (TREE_CODE (pos) == INTEGER_CST)
4284 constant = pos, var = integer_zero_node;
4285
4286 *pbitpos += TREE_INT_CST_LOW (constant);
4287 offset = size_binop (PLUS_EXPR, offset,
4288 size_binop (EXACT_DIV_EXPR, var,
4289 size_int (BITS_PER_UNIT)));
4290 }
4291
4292 else if (TREE_CODE (exp) == ARRAY_REF)
4293 {
4294 /* This code is based on the code in case ARRAY_REF in expand_expr
4295 below. We assume here that the size of an array element is
4296 always an integral multiple of BITS_PER_UNIT. */
4297
4298 tree index = TREE_OPERAND (exp, 1);
4299 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4300 tree low_bound
4301 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4302 tree index_type = TREE_TYPE (index);
4303
4304 if (! integer_zerop (low_bound))
4305 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4306
4307 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4308 {
4309 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4310 index);
4311 index_type = TREE_TYPE (index);
4312 }
4313
4314 index = fold (build (MULT_EXPR, index_type, index,
4315 convert (index_type,
4316 TYPE_SIZE (TREE_TYPE (exp)))));
4317
4318 if (TREE_CODE (index) == INTEGER_CST
4319 && TREE_INT_CST_HIGH (index) == 0)
4320 *pbitpos += TREE_INT_CST_LOW (index);
4321 else
4322 offset = size_binop (PLUS_EXPR, offset,
4323 size_binop (FLOOR_DIV_EXPR, index,
4324 size_int (BITS_PER_UNIT)));
4325 }
4326 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4327 && ! ((TREE_CODE (exp) == NOP_EXPR
4328 || TREE_CODE (exp) == CONVERT_EXPR)
4329 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4330 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4331 != UNION_TYPE))
4332 && (TYPE_MODE (TREE_TYPE (exp))
4333 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4334 break;
4335
4336 /* If any reference in the chain is volatile, the effect is volatile. */
4337 if (TREE_THIS_VOLATILE (exp))
4338 *pvolatilep = 1;
4339
4340 /* If the offset is non-constant already, then we can't assume any
4341 alignment more than the alignment here. */
4342 if (! integer_zerop (offset))
4343 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4344
4345 exp = TREE_OPERAND (exp, 0);
4346 }
4347
4348 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4349 alignment = MIN (alignment, DECL_ALIGN (exp));
4350 else if (TREE_TYPE (exp) != 0)
4351 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4352
4353 if (integer_zerop (offset))
4354 offset = 0;
4355
4356 if (offset != 0 && contains_placeholder_p (offset))
4357 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4358
4359 *pmode = mode;
4360 *poffset = offset;
4361 *palignment = alignment / BITS_PER_UNIT;
4362 return exp;
4363 }
4364 \f
4365 /* Given an rtx VALUE that may contain additions and multiplications,
4366 return an equivalent value that just refers to a register or memory.
4367 This is done by generating instructions to perform the arithmetic
4368 and returning a pseudo-register containing the value.
4369
4370 The returned value may be a REG, SUBREG, MEM or constant. */
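/* For example, given (plus (reg X) (const_int 4)) this emits an add
   insn and returns a register (possibly TARGET) holding the sum; a
   VALUE that is already a REG, MEM or constant is returned unchanged.  */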
4371
4372 rtx
4373 force_operand (value, target)
4374 rtx value, target;
4375 {
4376 register optab binoptab = 0;
4377 /* Use a temporary to force order of execution of calls to
4378 `force_operand'. */
4379 rtx tmp;
4380 register rtx op2;
4381 /* Use subtarget as the target for operand 0 of a binary operation. */
4382 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4383
4384 if (GET_CODE (value) == PLUS)
4385 binoptab = add_optab;
4386 else if (GET_CODE (value) == MINUS)
4387 binoptab = sub_optab;
4388 else if (GET_CODE (value) == MULT)
4389 {
4390 op2 = XEXP (value, 1);
4391 if (!CONSTANT_P (op2)
4392 && !(GET_CODE (op2) == REG && op2 != subtarget))
4393 subtarget = 0;
4394 tmp = force_operand (XEXP (value, 0), subtarget);
4395 return expand_mult (GET_MODE (value), tmp,
4396 force_operand (op2, NULL_RTX),
4397 target, 0);
4398 }
4399
4400 if (binoptab)
4401 {
4402 op2 = XEXP (value, 1);
4403 if (!CONSTANT_P (op2)
4404 && !(GET_CODE (op2) == REG && op2 != subtarget))
4405 subtarget = 0;
4406 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4407 {
4408 binoptab = add_optab;
4409 op2 = negate_rtx (GET_MODE (value), op2);
4410 }
4411
4412 /* Check for an addition with OP2 a constant integer and our first
4413 operand a PLUS of a virtual register and something else. In that
4414 case, we want to emit the sum of the virtual register and the
4415 constant first and then add the other value. This allows virtual
4416 register instantiation to simply modify the constant rather than
4417 creating another one around this addition. */
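/* For instance, for (plus (plus (reg virtual-stack-vars) (reg Y))
   (const_int 8)) we first form virtual-stack-vars + 8, which
   instantiation can later fold into a single frame-pointer offset,
   and only then add Y.  */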
4418 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4419 && GET_CODE (XEXP (value, 0)) == PLUS
4420 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4421 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4422 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4423 {
4424 rtx temp = expand_binop (GET_MODE (value), binoptab,
4425 XEXP (XEXP (value, 0), 0), op2,
4426 subtarget, 0, OPTAB_LIB_WIDEN);
4427 return expand_binop (GET_MODE (value), binoptab, temp,
4428 force_operand (XEXP (XEXP (value, 0), 1), 0),
4429 target, 0, OPTAB_LIB_WIDEN);
4430 }
4431
4432 tmp = force_operand (XEXP (value, 0), subtarget);
4433 return expand_binop (GET_MODE (value), binoptab, tmp,
4434 force_operand (op2, NULL_RTX),
4435 target, 0, OPTAB_LIB_WIDEN);
4436 /* We give UNSIGNEDP = 0 to expand_binop
4437 because the only operations we are expanding here are signed ones. */
4438 }
4439 return value;
4440 }
4441 \f
4442 /* Subroutine of expand_expr:
4443 save the non-copied parts (LIST) of an expr (LHS), and return a list
4444 which can restore these values to their previous values,
4445 should something modify their storage. */
4446
4447 static tree
4448 save_noncopied_parts (lhs, list)
4449 tree lhs;
4450 tree list;
4451 {
4452 tree tail;
4453 tree parts = 0;
4454
4455 for (tail = list; tail; tail = TREE_CHAIN (tail))
4456 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4457 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4458 else
4459 {
4460 tree part = TREE_VALUE (tail);
4461 tree part_type = TREE_TYPE (part);
4462 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4463 rtx target = assign_temp (part_type, 0, 1, 1);
4464 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4465 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4466 parts = tree_cons (to_be_saved,
4467 build (RTL_EXPR, part_type, NULL_TREE,
4468 (tree) target),
4469 parts);
4470 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4471 }
4472 return parts;
4473 }
4474
4475 /* Subroutine of expand_expr:
4476 record the non-copied parts (LIST) of an expr (LHS), and return a list
4477 which specifies the initial values of these parts. */
4478
4479 static tree
4480 init_noncopied_parts (lhs, list)
4481 tree lhs;
4482 tree list;
4483 {
4484 tree tail;
4485 tree parts = 0;
4486
4487 for (tail = list; tail; tail = TREE_CHAIN (tail))
4488 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4489 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4490 else
4491 {
4492 tree part = TREE_VALUE (tail);
4493 tree part_type = TREE_TYPE (part);
4494 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4495 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4496 }
4497 return parts;
4498 }
4499
4500 /* Subroutine of expand_expr: return nonzero iff there is no way that
4501 EXP can reference X, which is being modified. */
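/* For example, when expanding a = b + c with X being A's rtx, A may
   be used directly as the target for computing b + c only if
   safe_from_p says that neither B nor C can refer to A's storage.  */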
4502
4503 static int
4504 safe_from_p (x, exp)
4505 rtx x;
4506 tree exp;
4507 {
4508 rtx exp_rtl = 0;
4509 int i, nops;
4510
4511 if (x == 0
4512 /* If EXP has varying size, we MUST use a target since we currently
4513 have no way of allocating temporaries of variable size
4514 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4515 So we assume here that something at a higher level has prevented a
4516 clash. This is somewhat bogus, but the best we can do. Only
4517 do this when X is BLKmode. */
4518 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4519 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4520 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4521 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4522 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4523 != INTEGER_CST)
4524 && GET_MODE (x) == BLKmode))
4525 return 1;
4526
4527 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4528 find the underlying pseudo. */
4529 if (GET_CODE (x) == SUBREG)
4530 {
4531 x = SUBREG_REG (x);
4532 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4533 return 0;
4534 }
4535
4536 /* If X is a location in the outgoing argument area, it is always safe. */
4537 if (GET_CODE (x) == MEM
4538 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4539 || (GET_CODE (XEXP (x, 0)) == PLUS
4540 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4541 return 1;
4542
4543 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4544 {
4545 case 'd':
4546 exp_rtl = DECL_RTL (exp);
4547 break;
4548
4549 case 'c':
4550 return 1;
4551
4552 case 'x':
4553 if (TREE_CODE (exp) == TREE_LIST)
4554 return ((TREE_VALUE (exp) == 0
4555 || safe_from_p (x, TREE_VALUE (exp)))
4556 && (TREE_CHAIN (exp) == 0
4557 || safe_from_p (x, TREE_CHAIN (exp))));
4558 else
4559 return 0;
4560
4561 case '1':
4562 return safe_from_p (x, TREE_OPERAND (exp, 0));
4563
4564 case '2':
4565 case '<':
4566 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4567 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4568
4569 case 'e':
4570 case 'r':
4571 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4572 the expression. If it is set, we conflict iff we are that rtx or
4573 both are in memory. Otherwise, we check all operands of the
4574 expression recursively. */
4575
4576 switch (TREE_CODE (exp))
4577 {
4578 case ADDR_EXPR:
4579 return (staticp (TREE_OPERAND (exp, 0))
4580 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4581
4582 case INDIRECT_REF:
4583 if (GET_CODE (x) == MEM)
4584 return 0;
4585 break;
4586
4587 case CALL_EXPR:
4588 exp_rtl = CALL_EXPR_RTL (exp);
4589 if (exp_rtl == 0)
4590 {
4591 /* Assume that the call will clobber all hard registers and
4592 all of memory. */
4593 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4594 || GET_CODE (x) == MEM)
4595 return 0;
4596 }
4597
4598 break;
4599
4600 case RTL_EXPR:
4601 /* If a sequence exists, we would have to scan every instruction
4602 in the sequence to see if it was safe. This is probably not
4603 worthwhile. */
4604 if (RTL_EXPR_SEQUENCE (exp))
4605 return 0;
4606
4607 exp_rtl = RTL_EXPR_RTL (exp);
4608 break;
4609
4610 case WITH_CLEANUP_EXPR:
4611 exp_rtl = RTL_EXPR_RTL (exp);
4612 break;
4613
4614 case CLEANUP_POINT_EXPR:
4615 return safe_from_p (x, TREE_OPERAND (exp, 0));
4616
4617 case SAVE_EXPR:
4618 exp_rtl = SAVE_EXPR_RTL (exp);
4619 break;
4620
4621 case BIND_EXPR:
4622 /* The only operand we look at is operand 1. The rest aren't
4623 part of the expression. */
4624 return safe_from_p (x, TREE_OPERAND (exp, 1));
4625
4626 case METHOD_CALL_EXPR:
4627 /* This takes an rtx argument, but shouldn't appear here. */
4628 abort ();
4629 }
4630
4631 /* If we have an rtx, we do not need to scan our operands. */
4632 if (exp_rtl)
4633 break;
4634
4635 nops = tree_code_length[(int) TREE_CODE (exp)];
4636 for (i = 0; i < nops; i++)
4637 if (TREE_OPERAND (exp, i) != 0
4638 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4639 return 0;
4640 }
4641
4642 /* If we have an rtl, find any enclosed object. Then see if we conflict
4643 with it. */
4644 if (exp_rtl)
4645 {
4646 if (GET_CODE (exp_rtl) == SUBREG)
4647 {
4648 exp_rtl = SUBREG_REG (exp_rtl);
4649 if (GET_CODE (exp_rtl) == REG
4650 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4651 return 0;
4652 }
4653
4654 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4655 are memory and EXP is not readonly. */
4656 return ! (rtx_equal_p (x, exp_rtl)
4657 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4658 && ! TREE_READONLY (exp)));
4659 }
4660
4661 /* If we reach here, it is safe. */
4662 return 1;
4663 }
4664
4665 /* Subroutine of expand_expr: return nonzero iff EXP is an
4666 expression whose type is statically determinable. */
4667
4668 static int
4669 fixed_type_p (exp)
4670 tree exp;
4671 {
4672 if (TREE_CODE (exp) == PARM_DECL
4673 || TREE_CODE (exp) == VAR_DECL
4674 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4675 || TREE_CODE (exp) == COMPONENT_REF
4676 || TREE_CODE (exp) == ARRAY_REF)
4677 return 1;
4678 return 0;
4679 }
4680
4681 /* Subroutine of expand_expr: return rtx if EXP is a
4682 variable or parameter; else return 0. */
4683
4684 static rtx
4685 var_rtx (exp)
4686 tree exp;
4687 {
4688 STRIP_NOPS (exp);
4689 switch (TREE_CODE (exp))
4690 {
4691 case PARM_DECL:
4692 case VAR_DECL:
4693 return DECL_RTL (exp);
4694 default:
4695 return 0;
4696 }
4697 }
4698 \f
4699 /* expand_expr: generate code for computing expression EXP.
4700 An rtx for the computed value is returned. The value is never null.
4701 In the case of a void EXP, const0_rtx is returned.
4702
4703 The value may be stored in TARGET if TARGET is nonzero.
4704 TARGET is just a suggestion; callers must assume that
4705 the rtx returned may not be the same as TARGET.
4706
4707 If TARGET is CONST0_RTX, it means that the value will be ignored.
4708
4709 If TMODE is not VOIDmode, it suggests generating the
4710 result in mode TMODE. But this is done only when convenient.
4711 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4712 TMODE is just a suggestion; callers must assume that
4713 the rtx returned may not have mode TMODE.
4714
4715 Note that TARGET may have neither TMODE nor MODE. In that case, it
4716 probably will not be used.
4717
4718 If MODIFIER is EXPAND_SUM then when EXP is an addition
4719 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4720 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4721 products as above, or REG or MEM, or constant.
4722 Ordinarily in such cases we would output mul or add instructions
4723 and then return a pseudo reg containing the sum.
4724
4725 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4726 it also marks a label as absolutely required (it can't be dead).
4727 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4728 This is used for outputting expressions used in initializers.
4729
4730 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4731 with a constant address even if that address is not normally legitimate.
4732 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
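/* A typical caller in this file does something like
   rtx r = expand_expr (exp, NULL_RTX, VOIDmode, 0);
   and must be prepared for R to be any rtx of the appropriate mode,
   not necessarily the suggested target (here none).  */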
4733
4734 rtx
4735 expand_expr (exp, target, tmode, modifier)
4736 register tree exp;
4737 rtx target;
4738 enum machine_mode tmode;
4739 enum expand_modifier modifier;
4740 {
4741 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4742 This is static so it will be accessible to our recursive callees. */
4743 static tree placeholder_list = 0;
4744 register rtx op0, op1, temp;
4745 tree type = TREE_TYPE (exp);
4746 int unsignedp = TREE_UNSIGNED (type);
4747 register enum machine_mode mode = TYPE_MODE (type);
4748 register enum tree_code code = TREE_CODE (exp);
4749 optab this_optab;
4750 /* Use subtarget as the target for operand 0 of a binary operation. */
4751 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4752 rtx original_target = target;
4753 /* Maybe defer this until sure not doing bytecode? */
4754 int ignore = (target == const0_rtx
4755 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4756 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4757 || code == COND_EXPR)
4758 && TREE_CODE (type) == VOID_TYPE));
4759 tree context;
4760
4761
4762 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4763 {
4764 bc_expand_expr (exp);
4765 return NULL;
4766 }
4767
4768 /* Don't use hard regs as subtargets, because the combiner
4769 can only handle pseudo regs. */
4770 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4771 subtarget = 0;
4772 /* Avoid subtargets inside loops,
4773 since they hide some invariant expressions. */
4774 if (preserve_subexpressions_p ())
4775 subtarget = 0;
4776
4777 /* If we are going to ignore this result, we need only do something
4778 if there is a side-effect somewhere in the expression. If there
4779 is, short-circuit the most common cases here. Note that we must
4780 not call expand_expr with anything but const0_rtx in case this
4781 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4782
4783 if (ignore)
4784 {
4785 if (! TREE_SIDE_EFFECTS (exp))
4786 return const0_rtx;
4787
4788 /* Ensure we reference a volatile object even if value is ignored. */
4789 if (TREE_THIS_VOLATILE (exp)
4790 && TREE_CODE (exp) != FUNCTION_DECL
4791 && mode != VOIDmode && mode != BLKmode)
4792 {
4793 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4794 if (GET_CODE (temp) == MEM)
4795 temp = copy_to_reg (temp);
4796 return const0_rtx;
4797 }
4798
4799 if (TREE_CODE_CLASS (code) == '1')
4800 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4801 VOIDmode, modifier);
4802 else if (TREE_CODE_CLASS (code) == '2'
4803 || TREE_CODE_CLASS (code) == '<')
4804 {
4805 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4806 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4807 return const0_rtx;
4808 }
4809 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4811 /* If the second operand has no side effects, just evaluate
4812 the first. */
4813 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4814 VOIDmode, modifier);
4815
4816 target = 0;
4817 }
4818
4819 /* If we will do cse, generate all results into pseudo registers
4820 since 1) that allows cse to find more things
4821 and 2) otherwise cse could produce an insn the machine
4822 cannot support. */
4823
4824 if (! cse_not_expected && mode != BLKmode && target
4825 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4826 target = subtarget;
4827
4828 switch (code)
4829 {
4830 case LABEL_DECL:
4831 {
4832 tree function = decl_function_context (exp);
4833 /* Handle using a label in a containing function. */
4834 if (function != current_function_decl && function != 0)
4835 {
4836 struct function *p = find_function_data (function);
4837 /* Allocate in the memory associated with the function
4838 that the label is in. */
4839 push_obstacks (p->function_obstack,
4840 p->function_maybepermanent_obstack);
4841
4842 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4843 label_rtx (exp), p->forced_labels);
4844 pop_obstacks ();
4845 }
4846 else if (modifier == EXPAND_INITIALIZER)
4847 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4848 label_rtx (exp), forced_labels);
4849 temp = gen_rtx (MEM, FUNCTION_MODE,
4850 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4851 if (function != current_function_decl && function != 0)
4852 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4853 return temp;
4854 }
4855
4856 case PARM_DECL:
4857 if (DECL_RTL (exp) == 0)
4858 {
4859 error_with_decl (exp, "prior parameter's size depends on `%s'");
4860 return CONST0_RTX (mode);
4861 }
4862
4863 /* ... fall through ... */
4864
4865 case VAR_DECL:
4866 /* If a static var's type was incomplete when the decl was written,
4867 but the type is complete now, lay out the decl now. */
4868 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4869 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4870 {
4871 push_obstacks_nochange ();
4872 end_temporary_allocation ();
4873 layout_decl (exp, 0);
4874 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4875 pop_obstacks ();
4876 }
4877
4878 /* ... fall through ... */
4879
4880 case FUNCTION_DECL:
4881 case RESULT_DECL:
4882 if (DECL_RTL (exp) == 0)
4883 abort ();
4884
4885 /* Ensure the variable is marked as used even if it doesn't go through
4886 a parser. If it hasn't been used yet, write out an external
4887 definition. */
4888 if (! TREE_USED (exp))
4889 {
4890 assemble_external (exp);
4891 TREE_USED (exp) = 1;
4892 }
4893
4894 /* Show we haven't gotten RTL for this yet. */
4895 temp = 0;
4896
4897 /* Handle variables inherited from containing functions. */
4898 context = decl_function_context (exp);
4899
4900 /* We treat inline_function_decl as an alias for the current function
4901 because that is the inline function whose vars, types, etc.
4902 are being merged into the current function.
4903 See expand_inline_function. */
4904
4905 if (context != 0 && context != current_function_decl
4906 && context != inline_function_decl
4907 /* If var is static, we don't need a static chain to access it. */
4908 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4909 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4910 {
4911 rtx addr;
4912
4913 /* Mark as non-local and addressable. */
4914 DECL_NONLOCAL (exp) = 1;
4915 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4916 abort ();
4917 mark_addressable (exp);
4918 if (GET_CODE (DECL_RTL (exp)) != MEM)
4919 abort ();
4920 addr = XEXP (DECL_RTL (exp), 0);
4921 if (GET_CODE (addr) == MEM)
4922 addr = gen_rtx (MEM, Pmode,
4923 fix_lexical_addr (XEXP (addr, 0), exp));
4924 else
4925 addr = fix_lexical_addr (addr, exp);
4926 temp = change_address (DECL_RTL (exp), mode, addr);
4927 }
4928
4929 /* This is the case of an array whose size is to be determined
4930 from its initializer, while the initializer is still being parsed.
4931 See expand_decl. */
4932
4933 else if (GET_CODE (DECL_RTL (exp)) == MEM
4934 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4935 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4936 XEXP (DECL_RTL (exp), 0));
4937
4938 /* If DECL_RTL is memory, we are in the normal case and either
4939 the address is not valid or it is not a register and -fforce-addr
4940 is specified, get the address into a register. */
4941
4942 else if (GET_CODE (DECL_RTL (exp)) == MEM
4943 && modifier != EXPAND_CONST_ADDRESS
4944 && modifier != EXPAND_SUM
4945 && modifier != EXPAND_INITIALIZER
4946 && (! memory_address_p (DECL_MODE (exp),
4947 XEXP (DECL_RTL (exp), 0))
4948 || (flag_force_addr
4949 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4950 temp = change_address (DECL_RTL (exp), VOIDmode,
4951 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4952
4953 /* If we got something, return it. But first, set the alignment
4954 if the address is a register. */
4955 if (temp != 0)
4956 {
4957 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4958 mark_reg_pointer (XEXP (temp, 0),
4959 DECL_ALIGN (exp) / BITS_PER_UNIT);
4960
4961 return temp;
4962 }
4963
4964 /* If the mode of DECL_RTL does not match that of the decl, it
4965 must be a promoted value. We return a SUBREG of the wanted mode,
4966 but mark it so that we know that it was already extended. */
4967
4968 if (GET_CODE (DECL_RTL (exp)) == REG
4969 && GET_MODE (DECL_RTL (exp)) != mode)
4970 {
4971 /* Get the signedness used for this variable. Ensure we get the
4972 same mode we got when the variable was declared. */
4973 if (GET_MODE (DECL_RTL (exp))
4974 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4975 abort ();
4976
4977 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4978 SUBREG_PROMOTED_VAR_P (temp) = 1;
4979 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4980 return temp;
4981 }
4982
4983 return DECL_RTL (exp);
4984
4985 case INTEGER_CST:
4986 return immed_double_const (TREE_INT_CST_LOW (exp),
4987 TREE_INT_CST_HIGH (exp),
4988 mode);
4989
4990 case CONST_DECL:
4991 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4992
4993 case REAL_CST:
4994 /* If optimized, generate immediate CONST_DOUBLE
4995 which will be turned into memory by reload if necessary.
4996
4997 We used to force a register so that loop.c could see it. But
4998 this does not allow gen_* patterns to perform optimizations with
4999 the constants. It also produces two insns in cases like "x = 1.0;".
5000 On most machines, floating-point constants are not permitted in
5001 many insns, so we'd end up copying it to a register in any case.
5002
5003 Now, we do the copying in expand_binop, if appropriate. */
5004 return immed_real_const (exp);
5005
5006 case COMPLEX_CST:
5007 case STRING_CST:
5008 if (! TREE_CST_RTL (exp))
5009 output_constant_def (exp);
5010
5011 /* TREE_CST_RTL probably contains a constant address.
5012 On RISC machines where a constant address isn't valid,
5013 make some insns to get that address into a register. */
5014 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5015 && modifier != EXPAND_CONST_ADDRESS
5016 && modifier != EXPAND_INITIALIZER
5017 && modifier != EXPAND_SUM
5018 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5019 || (flag_force_addr
5020 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5021 return change_address (TREE_CST_RTL (exp), VOIDmode,
5022 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5023 return TREE_CST_RTL (exp);
5024
5025 case SAVE_EXPR:
5026 context = decl_function_context (exp);
5027
5028 /* We treat inline_function_decl as an alias for the current function
5029 because that is the inline function whose vars, types, etc.
5030 are being merged into the current function.
5031 See expand_inline_function. */
5032 if (context == current_function_decl || context == inline_function_decl)
5033 context = 0;
5034
5035 /* If this is non-local, handle it. */
5036 if (context)
5037 {
5038 temp = SAVE_EXPR_RTL (exp);
5039 if (temp && GET_CODE (temp) == REG)
5040 {
5041 put_var_into_stack (exp);
5042 temp = SAVE_EXPR_RTL (exp);
5043 }
5044 if (temp == 0 || GET_CODE (temp) != MEM)
5045 abort ();
5046 return change_address (temp, mode,
5047 fix_lexical_addr (XEXP (temp, 0), exp));
5048 }
5049 if (SAVE_EXPR_RTL (exp) == 0)
5050 {
5051 if (mode == VOIDmode)
5052 temp = const0_rtx;
5053 else
5054 temp = assign_temp (type, 0, 0, 0);
5055
5056 SAVE_EXPR_RTL (exp) = temp;
5057 if (!optimize && GET_CODE (temp) == REG)
5058 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5059 save_expr_regs);
5060
5061 /* If the mode of TEMP does not match that of the expression, it
5062 must be a promoted value. We pass store_expr a SUBREG of the
5063 wanted mode but mark it so that we know that it was already
5064 extended. Note that `unsignedp' was modified above in
5065 this case. */
5066
5067 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5068 {
5069 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5070 SUBREG_PROMOTED_VAR_P (temp) = 1;
5071 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5072 }
5073
5074 if (temp == const0_rtx)
5075 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5076 else
5077 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5078 }
5079
5080 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5081 must be a promoted value. We return a SUBREG of the wanted mode,
5082 but mark it so that we know that it was already extended. */
5083
5084 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5085 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5086 {
5087 /* Compute the signedness and make the proper SUBREG. */
5088 promote_mode (type, mode, &unsignedp, 0);
5089 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5090 SUBREG_PROMOTED_VAR_P (temp) = 1;
5091 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5092 return temp;
5093 }
5094
5095 return SAVE_EXPR_RTL (exp);
5096
5097 case UNSAVE_EXPR:
5098 {
5099 rtx temp;
5100 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5101 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5102 return temp;
5103 }
5104
5105 case PLACEHOLDER_EXPR:
5106 /* If there is an object on the head of the placeholder list,
5107 see if some object in its references is of type TYPE. For
5108 further information, see tree.def. */
5109 if (placeholder_list)
5110 {
5111 tree need_type = TYPE_MAIN_VARIANT (type);
5112 tree object = 0;
5113 tree old_list = placeholder_list;
5114 tree elt;
5115
5116 /* See if the object is the type that we want. Then see if
5117 the operand of any reference is the type we want. */
5118 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5119 == need_type))
5120 object = TREE_PURPOSE (placeholder_list);
5121
5122 /* Find the innermost reference that is of the type we want. */
5123 for (elt = TREE_PURPOSE (placeholder_list);
5124 elt != 0
5125 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5126 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5127 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5128 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5129 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5130 || TREE_CODE (elt) == COND_EXPR)
5131 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5132 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5133 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5134 == need_type))
5135 object = TREE_OPERAND (elt, 0);
5136
5137 if (object != 0)
5138 {
5139 /* Expand this object skipping the list entries before
5140 it was found in case it is also a PLACEHOLDER_EXPR.
5141 In that case, we want to translate it using subsequent
5142 entries. */
5143 placeholder_list = TREE_CHAIN (placeholder_list);
5144 temp = expand_expr (object, original_target, tmode, modifier);
5145 placeholder_list = old_list;
5146 return temp;
5147 }
5148 }
5149
5150 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5151 abort ();
5152
5153 case WITH_RECORD_EXPR:
5154 /* Put the object on the placeholder list, expand our first operand,
5155 and pop the list. */
5156 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5157 placeholder_list);
5158 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5159 tmode, modifier);
5160 placeholder_list = TREE_CHAIN (placeholder_list);
5161 return target;
5162
5163 case EXIT_EXPR:
5164 expand_exit_loop_if_false (NULL_PTR,
5165 invert_truthvalue (TREE_OPERAND (exp, 0)));
5166 return const0_rtx;
5167
5168 case LOOP_EXPR:
5169 push_temp_slots ();
5170 expand_start_loop (1);
5171 expand_expr_stmt (TREE_OPERAND (exp, 0));
5172 expand_end_loop ();
5173 pop_temp_slots ();
5174
5175 return const0_rtx;
5176
5177 case BIND_EXPR:
5178 {
5179 tree vars = TREE_OPERAND (exp, 0);
5180 int vars_need_expansion = 0;
5181
5182 /* Need to open a binding contour here because
5183 if there are any cleanups they must be contained here. */
5184 expand_start_bindings (0);
5185
5186 /* Mark the corresponding BLOCK for output in its proper place. */
5187 if (TREE_OPERAND (exp, 2) != 0
5188 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5189 insert_block (TREE_OPERAND (exp, 2));
5190
5191 /* If VARS have not yet been expanded, expand them now. */
5192 while (vars)
5193 {
5194 if (DECL_RTL (vars) == 0)
5195 {
5196 vars_need_expansion = 1;
5197 expand_decl (vars);
5198 }
5199 expand_decl_init (vars);
5200 vars = TREE_CHAIN (vars);
5201 }
5202
5203 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5204
5205 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5206
5207 return temp;
5208 }
5209
5210 case RTL_EXPR:
5211 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5212 abort ();
5213 emit_insns (RTL_EXPR_SEQUENCE (exp));
5214 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5215 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5216 free_temps_for_rtl_expr (exp);
5217 return RTL_EXPR_RTL (exp);
5218
5219 case CONSTRUCTOR:
5220 /* If we don't need the result, just ensure we evaluate any
5221 subexpressions. */
5222 if (ignore)
5223 {
5224 tree elt;
5225 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5226 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5227 return const0_rtx;
5228 }
5229
5230 /* All elts simple constants => refer to a constant in memory. But
5231 if this is a non-BLKmode mode, let it store a field at a time
5232 since that should make a CONST_INT or CONST_DOUBLE when we
5233 fold. Likewise, if we have a target we can use, it is best to
5234 store directly into the target unless the type is large enough
5235 that memcpy will be used. If we are making an initializer and
5236 all operands are constant, put it in memory as well. */
5237 else if ((TREE_STATIC (exp)
5238 && ((mode == BLKmode
5239 && ! (target != 0 && safe_from_p (target, exp)))
5240 || TREE_ADDRESSABLE (exp)
5241 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5242 && (move_by_pieces_ninsns
5243 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5244 TYPE_ALIGN (type) / BITS_PER_UNIT)
5245 > MOVE_RATIO)
5246 && ! mostly_zeros_p (exp))))
5247 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5248 {
5249 rtx constructor = output_constant_def (exp);
5250 if (modifier != EXPAND_CONST_ADDRESS
5251 && modifier != EXPAND_INITIALIZER
5252 && modifier != EXPAND_SUM
5253 && (! memory_address_p (GET_MODE (constructor),
5254 XEXP (constructor, 0))
5255 || (flag_force_addr
5256 && GET_CODE (XEXP (constructor, 0)) != REG)))
5257 constructor = change_address (constructor, VOIDmode,
5258 XEXP (constructor, 0));
5259 return constructor;
5260 }
5261
5262 else
5263 {
5264 /* Handle calls that pass values in multiple non-contiguous
5265 locations. The Irix 6 ABI has examples of this. */
5266 if (target == 0 || ! safe_from_p (target, exp)
5267 || GET_CODE (target) == PARALLEL)
5268 {
5269 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5270 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5271 else
5272 target = assign_temp (type, 0, 1, 1);
5273 }
5274
5275 if (TREE_READONLY (exp))
5276 {
5277 if (GET_CODE (target) == MEM)
5278 target = change_address (target, GET_MODE (target),
5279 XEXP (target, 0));
5280 RTX_UNCHANGING_P (target) = 1;
5281 }
5282
5283 store_constructor (exp, target, 0);
5284 return target;
5285 }
5286
5287 case INDIRECT_REF:
5288 {
5289 tree exp1 = TREE_OPERAND (exp, 0);
5290 tree exp2;
5291
5292 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5293 op0 = memory_address (mode, op0);
5294
5295 temp = gen_rtx (MEM, mode, op0);
5296 /* If address was computed by addition,
5297 mark this as an element of an aggregate. */
5298 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5299 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5300 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5301 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5302 || (TREE_CODE (exp1) == ADDR_EXPR
5303 && (exp2 = TREE_OPERAND (exp1, 0))
5304 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5305 MEM_IN_STRUCT_P (temp) = 1;
5306 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5307
5308 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5309 here, because, in C and C++, the fact that a location is accessed
5310 through a pointer to const does not mean that the value there can
5311 never change. Languages where it can never change should
5312 also set TREE_STATIC. */
5313 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5314 return temp;
5315 }
5316
5317 case ARRAY_REF:
5318 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5319 abort ();
5320
5321 {
5322 tree array = TREE_OPERAND (exp, 0);
5323 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5324 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5325 tree index = TREE_OPERAND (exp, 1);
5326 tree index_type = TREE_TYPE (index);
5327 int i;
5328
5329 if (TREE_CODE (low_bound) != INTEGER_CST
5330 && contains_placeholder_p (low_bound))
5331 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5332
5333 /* Optimize the special-case of a zero lower bound.
5334
5335 We convert the low_bound to sizetype to avoid some problems
5336 with constant folding. (E.g. suppose the lower bound is 1,
5337 and its mode is QI. Without the conversion, (ARRAY
5338 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5339 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5340
5341 But sizetype isn't quite right either (especially if
5342 the low bound is negative). FIXME */
5343
5344 if (! integer_zerop (low_bound))
5345 index = fold (build (MINUS_EXPR, index_type, index,
5346 convert (sizetype, low_bound)));
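
/* Editorial sketch (not part of expr.c): why low_bound is converted to
   sizetype before folding.  If the negated low bound stayed in a narrow
   unsigned type (QImode in the comment above), "index - 1" would be
   re-associated into "index + (unsigned char) -1", i.e. "index + 255".
   A standalone C analogue, assuming 8-bit chars; names are illustrative. */
static int
narrow_negation_demo (void)
{
  unsigned char low = 1;
  unsigned char neg_low = (unsigned char) -low;   /* wraps to 255, not -1 */
  int index = 4;

  /* index - low == 3 is the intended offset; index + neg_low == 259 is the
     "Oops!" result described above.  Returns nonzero when both hold. */
  return (neg_low == 255) && (index - low == 3) && (index + neg_low == 259);
}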
5347
5348 if ((TREE_CODE (index) != INTEGER_CST
5349 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5350 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5351 {
5352 /* Nonconstant array index or nonconstant element size, and
5353 not an array in an unaligned (packed) structure field.
5354 Generate the tree for *(&array+index) and expand that,
5355 except do it in a language-independent way
5356 and don't complain about non-lvalue arrays.
5357 `mark_addressable' should already have been called
5358 for any array for which this case will be reached. */
5359
5360 /* Don't forget the const or volatile flag from the array
5361 element. */
5362 tree variant_type = build_type_variant (type,
5363 TREE_READONLY (exp),
5364 TREE_THIS_VOLATILE (exp));
5365 tree array_adr = build1 (ADDR_EXPR,
5366 build_pointer_type (variant_type), array);
5367 tree elt;
5368 tree size = size_in_bytes (type);
5369
5370 /* Convert the integer argument to a type the same size as sizetype
5371 so the multiply won't overflow spuriously. */
5372 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5373 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5374 index);
5375
5376 if (TREE_CODE (size) != INTEGER_CST
5377 && contains_placeholder_p (size))
5378 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5379
5380 /* Don't think the address has side effects
5381 just because the array does.
5382 (In some cases the address might have side effects,
5383 and we fail to record that fact here. However, it should not
5384 matter, since expand_expr should not care.) */
5385 TREE_SIDE_EFFECTS (array_adr) = 0;
5386
5387 elt
5388 = build1
5389 (INDIRECT_REF, type,
5390 fold (build (PLUS_EXPR,
5391 TYPE_POINTER_TO (variant_type),
5392 array_adr,
5393 fold
5394 (build1
5395 (NOP_EXPR,
5396 TYPE_POINTER_TO (variant_type),
5397 fold (build (MULT_EXPR, TREE_TYPE (index),
5398 index,
5399 convert (TREE_TYPE (index),
5400 size))))))));
5401
5402 /* Volatility, etc., of new expression is same as old
5403 expression. */
5404 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5405 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5406 TREE_READONLY (elt) = TREE_READONLY (exp);
5407
5408 return expand_expr (elt, target, tmode, modifier);
5409 }
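
/* Editorial sketch (not part of expr.c): the *(&array + index*size) rewrite
   built above for a non-constant index, written as plain C.  The element
   type "double", the function name, and the in-range index are illustrative
   assumptions only. */
static double
array_ref_via_pointer (double *array, long index)
{
  /* &array plus the index scaled by the element size in bytes, then
     dereferenced; this mirrors the ADDR_EXPR, MULT_EXPR and INDIRECT_REF
     trees constructed above. */
  char *addr = (char *) array + index * (long) sizeof (double);
  return *(double *) addr;
}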
5410
5411 /* Fold an expression like: "foo"[2].
5412 This is not done in fold so it won't happen inside &.
5413 Don't fold if this is for wide characters since it's too
5414 difficult to do correctly and this is a very rare case. */
5415
5416 if (TREE_CODE (array) == STRING_CST
5417 && TREE_CODE (index) == INTEGER_CST
5418 && !TREE_INT_CST_HIGH (index)
5419 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5420 && GET_MODE_CLASS (mode) == MODE_INT
5421 && GET_MODE_SIZE (mode) == 1)
5422 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5423
5424 /* If this is a constant index into a constant array,
5425 just get the value from the array. Handle both the cases when
5426 we have an explicit constructor and when our operand is a variable
5427 that was declared const. */
5428
5429 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5430 {
5431 if (TREE_CODE (index) == INTEGER_CST
5432 && TREE_INT_CST_HIGH (index) == 0)
5433 {
5434 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5435
5436 i = TREE_INT_CST_LOW (index);
5437 while (elem && i--)
5438 elem = TREE_CHAIN (elem);
5439 if (elem)
5440 return expand_expr (fold (TREE_VALUE (elem)), target,
5441 tmode, modifier);
5442 }
5443 }
5444
5445 else if (optimize >= 1
5446 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5447 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5448 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5449 {
5450 if (TREE_CODE (index) == INTEGER_CST
5451 && TREE_INT_CST_HIGH (index) == 0)
5452 {
5453 tree init = DECL_INITIAL (array);
5454
5455 i = TREE_INT_CST_LOW (index);
5456 if (TREE_CODE (init) == CONSTRUCTOR)
5457 {
5458 tree elem = CONSTRUCTOR_ELTS (init);
5459
5460 while (elem
5461 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5462 elem = TREE_CHAIN (elem);
5463 if (elem)
5464 return expand_expr (fold (TREE_VALUE (elem)), target,
5465 tmode, modifier);
5466 }
5467 else if (TREE_CODE (init) == STRING_CST
5468 && i < TREE_STRING_LENGTH (init))
5469 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5470 }
5471 }
5472 }
5473
5474 /* Treat array-ref with constant index as a component-ref. */
5475
5476 case COMPONENT_REF:
5477 case BIT_FIELD_REF:
5478 /* If the operand is a CONSTRUCTOR, we can just extract the
5479 appropriate field if it is present. Don't do this if we have
5480 already written the data since we want to refer to that copy
5481 and varasm.c assumes that's what we'll do. */
5482 if (code != ARRAY_REF
5483 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5484 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5485 {
5486 tree elt;
5487
5488 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5489 elt = TREE_CHAIN (elt))
5490 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5491 /* We can normally use the value of the field in the
5492 CONSTRUCTOR. However, if this is a bitfield in
5493 an integral mode that we can fit in a HOST_WIDE_INT,
5494 we must mask only the number of bits in the bitfield,
5495 since this is done implicitly by the constructor. If
5496 the bitfield does not meet either of those conditions,
5497 we can't do this optimization. */
5498 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5499 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5500 == MODE_INT)
5501 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5502 <= HOST_BITS_PER_WIDE_INT))))
5503 {
5504 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5505 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5506 {
5507 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5508 enum machine_mode imode
5509 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5510
5511 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5512 {
5513 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5514 op0 = expand_and (op0, op1, target);
5515 }
5516 else
5517 {
5518 tree count
5519 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5520
5521 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5522 target, 0);
5523 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5524 target, 0);
5525 }
5526 }
5527
5528 return op0;
5529 }
5530 }
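
/* Editorial sketch (not part of expr.c): the two fix-ups applied just above
   when a bit-field value is taken straight from a CONSTRUCTOR.  An unsigned
   field is masked to its width; a signed field is sign-extended by shifting
   it to the top of the word and arithmetic-shifting it back down.  The
   long-based helpers and names are illustrative only, and assume 8-bit
   bytes, an arithmetic right shift on signed types, and
   0 < bitsize < the width of long. */
static unsigned long
mask_unsigned_field (unsigned long value, int bitsize)
{
  return value & ((1UL << bitsize) - 1);            /* expand_and above */
}

static long
sign_extend_field (long value, int bitsize)
{
  int count = (int) (sizeof (long) * 8) - bitsize;  /* GET_MODE_BITSIZE - bitsize */
  return ((long) ((unsigned long) value << count)) >> count;  /* LSHIFT, RSHIFT */
}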
5531
5532 {
5533 enum machine_mode mode1;
5534 int bitsize;
5535 int bitpos;
5536 tree offset;
5537 int volatilep = 0;
5538 int alignment;
5539 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5540 &mode1, &unsignedp, &volatilep,
5541 &alignment);
5542
5543 /* If we got back the original object, something is wrong. Perhaps
5544 we are evaluating an expression too early. In any event, don't
5545 infinitely recurse. */
5546 if (tem == exp)
5547 abort ();
5548
5549 /* If TEM's type is a union of variable size, pass TARGET to the inner
5550 computation, since it will need a temporary and TARGET is known
5551 to suffice. This occurs in unchecked conversion in Ada.
5552
5553 op0 = expand_expr (tem,
5554 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5555 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5556 != INTEGER_CST)
5557 ? target : NULL_RTX),
5558 VOIDmode,
5559 modifier == EXPAND_INITIALIZER ? modifier : 0);
5560
5561 /* If this is a constant, put it into a register if it is a
5562 legitimate constant and memory if it isn't. */
5563 if (CONSTANT_P (op0))
5564 {
5565 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5566 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5567 op0 = force_reg (mode, op0);
5568 else
5569 op0 = validize_mem (force_const_mem (mode, op0));
5570 }
5571
5572 if (offset != 0)
5573 {
5574 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5575
5576 if (GET_CODE (op0) != MEM)
5577 abort ();
5578 op0 = change_address (op0, VOIDmode,
5579 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5580 force_reg (ptr_mode, offset_rtx)));
5581 }
5582
5583 /* Don't forget about volatility even if this is a bitfield. */
5584 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5585 {
5586 op0 = copy_rtx (op0);
5587 MEM_VOLATILE_P (op0) = 1;
5588 }
5589
5590 /* In cases where an aligned union has an unaligned object
5591 as a field, we might be extracting a BLKmode value from
5592 an integer-mode (e.g., SImode) object. Handle this case
5593 by doing the extract into an object as wide as the field
5594 (which we know to be the width of a basic mode), then
5595 storing into memory, and changing the mode to BLKmode.
5596 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5597 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5598 if (mode1 == VOIDmode
5599 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5600 || (modifier != EXPAND_CONST_ADDRESS
5601 && modifier != EXPAND_INITIALIZER
5602 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5603 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5604 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5605 /* If the field isn't aligned enough to fetch as a memref,
5606 fetch it as a bit field. */
5607 || (SLOW_UNALIGNED_ACCESS
5608 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5609 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5610 {
5611 enum machine_mode ext_mode = mode;
5612
5613 if (ext_mode == BLKmode)
5614 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5615
5616 if (ext_mode == BLKmode)
5617 {
5618 /* In this case, BITPOS must start at a byte boundary and
5619 TARGET, if specified, must be a MEM. */
5620 if (GET_CODE (op0) != MEM
5621 || (target != 0 && GET_CODE (target) != MEM)
5622 || bitpos % BITS_PER_UNIT != 0)
5623 abort ();
5624
5625 op0 = change_address (op0, VOIDmode,
5626 plus_constant (XEXP (op0, 0),
5627 bitpos / BITS_PER_UNIT));
5628 if (target == 0)
5629 target = assign_temp (type, 0, 1, 1);
5630
5631 emit_block_move (target, op0,
5632 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5633 / BITS_PER_UNIT),
5634 1);
5635
5636 return target;
5637 }
5638
5639 op0 = validize_mem (op0);
5640
5641 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5642 mark_reg_pointer (XEXP (op0, 0), alignment);
5643
5644 op0 = extract_bit_field (op0, bitsize, bitpos,
5645 unsignedp, target, ext_mode, ext_mode,
5646 alignment,
5647 int_size_in_bytes (TREE_TYPE (tem)));
5648
5649 /* If the result is a record type and BITSIZE is narrower than
5650 the mode of OP0, an integral mode, and this is a big endian
5651 machine, we must put the field into the high-order bits. */
5652 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5653 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5654 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5655 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5656 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5657 - bitsize),
5658 op0, 1);
5659
5660 if (mode == BLKmode)
5661 {
5662 rtx new = assign_stack_temp (ext_mode,
5663 bitsize / BITS_PER_UNIT, 0);
5664
5665 emit_move_insn (new, op0);
5666 op0 = copy_rtx (new);
5667 PUT_MODE (op0, BLKmode);
5668 MEM_IN_STRUCT_P (op0) = 1;
5669 }
5670
5671 return op0;
5672 }
5673
5674 /* If the result is BLKmode, use that to access the object
5675 now as well. */
5676 if (mode == BLKmode)
5677 mode1 = BLKmode;
5678
5679 /* Get a reference to just this component. */
5680 if (modifier == EXPAND_CONST_ADDRESS
5681 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5682 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5683 (bitpos / BITS_PER_UNIT)));
5684 else
5685 op0 = change_address (op0, mode1,
5686 plus_constant (XEXP (op0, 0),
5687 (bitpos / BITS_PER_UNIT)));
5688 if (GET_CODE (XEXP (op0, 0)) == REG)
5689 mark_reg_pointer (XEXP (op0, 0), alignment);
5690
5691 MEM_IN_STRUCT_P (op0) = 1;
5692 MEM_VOLATILE_P (op0) |= volatilep;
5693 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5694 || modifier == EXPAND_CONST_ADDRESS
5695 || modifier == EXPAND_INITIALIZER)
5696 return op0;
5697 else if (target == 0)
5698 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5699
5700 convert_move (target, op0, unsignedp);
5701 return target;
5702 }
5703
5704 /* Intended for a reference to a buffer of a file-object in Pascal.
5705 But it's not certain that a special tree code will really be
5706 necessary for these. INDIRECT_REF might work for them. */
5707 case BUFFER_REF:
5708 abort ();
5709
5710 case IN_EXPR:
5711 {
5712 /* Pascal set IN expression.
5713
5714 Algorithm:
5715 rlo = set_low - (set_low%bits_per_word);
5716 the_word = set [ (index - rlo)/bits_per_word ];
5717 bit_index = index % bits_per_word;
5718 bitmask = 1 << bit_index;
5719 return !!(the_word & bitmask); */
5720
5721 tree set = TREE_OPERAND (exp, 0);
5722 tree index = TREE_OPERAND (exp, 1);
5723 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5724 tree set_type = TREE_TYPE (set);
5725 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5726 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5727 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5728 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5729 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5730 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5731 rtx setaddr = XEXP (setval, 0);
5732 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5733 rtx rlow;
5734 rtx diff, quo, rem, addr, bit, result;
5735
5736 preexpand_calls (exp);
5737
5738 /* If domain is empty, answer is no. Likewise if index is constant
5739 and out of bounds. */
5740 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5741 && TREE_CODE (set_low_bound) == INTEGER_CST
5742 && tree_int_cst_lt (set_high_bound, set_low_bound)
5743 || (TREE_CODE (index) == INTEGER_CST
5744 && TREE_CODE (set_low_bound) == INTEGER_CST
5745 && tree_int_cst_lt (index, set_low_bound))
5746 || (TREE_CODE (set_high_bound) == INTEGER_CST
5747 && TREE_CODE (index) == INTEGER_CST
5748 && tree_int_cst_lt (set_high_bound, index))))
5749 return const0_rtx;
5750
5751 if (target == 0)
5752 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5753
5754 /* If we get here, we have to generate the code for both cases
5755 (in range and out of range). */
5756
5757 op0 = gen_label_rtx ();
5758 op1 = gen_label_rtx ();
5759
5760 if (! (GET_CODE (index_val) == CONST_INT
5761 && GET_CODE (lo_r) == CONST_INT))
5762 {
5763 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5764 GET_MODE (index_val), iunsignedp, 0);
5765 emit_jump_insn (gen_blt (op1));
5766 }
5767
5768 if (! (GET_CODE (index_val) == CONST_INT
5769 && GET_CODE (hi_r) == CONST_INT))
5770 {
5771 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5772 GET_MODE (index_val), iunsignedp, 0);
5773 emit_jump_insn (gen_bgt (op1));
5774 }
5775
5776 /* Calculate the element number of bit zero in the first word
5777 of the set. */
5778 if (GET_CODE (lo_r) == CONST_INT)
5779 rlow = GEN_INT (INTVAL (lo_r)
5780 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5781 else
5782 rlow = expand_binop (index_mode, and_optab, lo_r,
5783 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5784 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5785
5786 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5787 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5788
5789 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5790 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5791 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5792 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5793
5794 addr = memory_address (byte_mode,
5795 expand_binop (index_mode, add_optab, diff,
5796 setaddr, NULL_RTX, iunsignedp,
5797 OPTAB_LIB_WIDEN));
5798
5799 /* Extract the bit we want to examine */
5800 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5801 gen_rtx (MEM, byte_mode, addr),
5802 make_tree (TREE_TYPE (index), rem),
5803 NULL_RTX, 1);
5804 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5805 GET_MODE (target) == byte_mode ? target : 0,
5806 1, OPTAB_LIB_WIDEN);
5807
5808 if (result != target)
5809 convert_move (target, result, 1);
5810
5811 /* Output the code to handle the out-of-range case. */
5812 emit_jump (op0);
5813 emit_label (op1);
5814 emit_move_insn (target, const0_rtx);
5815 emit_label (op0);
5816 return target;
5817 }
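
/* Editorial sketch (not part of expr.c): the membership test this case
   expands, written over a plain byte array.  It follows the algorithm
   comment above (rlo, the_word, bit_index, bitmask); an 8-bit byte stands
   in for BITS_PER_UNIT, the names are illustrative, and non-negative bounds
   are assumed so that % rounds the way the comment intends. */
static int
pascal_set_member (const unsigned char *set, int set_low, int set_high,
                   int index)
{
  int rlo, bit_index;
  unsigned char the_word, bitmask;

  /* Empty domain or out-of-range index: the answer is no, as above.  */
  if (set_high < set_low || index < set_low || index > set_high)
    return 0;

  rlo = set_low - (set_low % 8);        /* element number of bit 0 of word 0 */
  the_word = set[(index - rlo) / 8];
  bit_index = index % 8;
  bitmask = (unsigned char) (1 << bit_index);
  return (the_word & bitmask) != 0;
}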
5818
5819 case WITH_CLEANUP_EXPR:
5820 if (RTL_EXPR_RTL (exp) == 0)
5821 {
5822 RTL_EXPR_RTL (exp)
5823 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5824 cleanups_this_call
5825 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5826 /* That's it for this cleanup. */
5827 TREE_OPERAND (exp, 2) = 0;
5828 expand_eh_region_start ();
5829 }
5830 return RTL_EXPR_RTL (exp);
5831
5832 case CLEANUP_POINT_EXPR:
5833 {
5834 extern int temp_slot_level;
5835 tree old_cleanups = cleanups_this_call;
5836 int old_temp_level = target_temp_slot_level;
5837 push_temp_slots ();
5838 target_temp_slot_level = temp_slot_level;
5839 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5840 /* If we're going to use this value, load it up now. */
5841 if (! ignore)
5842 op0 = force_not_mem (op0);
5843 expand_cleanups_to (old_cleanups);
5844 preserve_temp_slots (op0);
5845 free_temp_slots ();
5846 pop_temp_slots ();
5847 target_temp_slot_level = old_temp_level;
5848 }
5849 return op0;
5850
5851 case CALL_EXPR:
5852 /* Check for a built-in function. */
5853 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5854 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5855 == FUNCTION_DECL)
5856 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5857 return expand_builtin (exp, target, subtarget, tmode, ignore);
5858
5859 /* If this call was expanded already by preexpand_calls,
5860 just return the result we got. */
5861 if (CALL_EXPR_RTL (exp) != 0)
5862 return CALL_EXPR_RTL (exp);
5863
5864 return expand_call (exp, target, ignore);
5865
5866 case NON_LVALUE_EXPR:
5867 case NOP_EXPR:
5868 case CONVERT_EXPR:
5869 case REFERENCE_EXPR:
5870 if (TREE_CODE (type) == UNION_TYPE)
5871 {
5872 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5873 if (target == 0)
5874 {
5875 if (mode != BLKmode)
5876 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5877 else
5878 target = assign_temp (type, 0, 1, 1);
5879 }
5880
5881 if (GET_CODE (target) == MEM)
5882 /* Store data into beginning of memory target. */
5883 store_expr (TREE_OPERAND (exp, 0),
5884 change_address (target, TYPE_MODE (valtype), 0), 0);
5885
5886 else if (GET_CODE (target) == REG)
5887 /* Store this field into a union of the proper type. */
5888 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5889 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5890 VOIDmode, 0, 1,
5891 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5892 else
5893 abort ();
5894
5895 /* Return the entire union. */
5896 return target;
5897 }
5898
5899 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5900 {
5901 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5902 modifier);
5903
5904 /* If the signedness of the conversion differs and OP0 is
5905 a promoted SUBREG, clear that indication since we now
5906 have to do the proper extension. */
5907 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5908 && GET_CODE (op0) == SUBREG)
5909 SUBREG_PROMOTED_VAR_P (op0) = 0;
5910
5911 return op0;
5912 }
5913
5914 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5915 if (GET_MODE (op0) == mode)
5916 return op0;
5917
5918 /* If OP0 is a constant, just convert it into the proper mode. */
5919 if (CONSTANT_P (op0))
5920 return
5921 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5922 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5923
5924 if (modifier == EXPAND_INITIALIZER)
5925 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5926
5927 if (target == 0)
5928 return
5929 convert_to_mode (mode, op0,
5930 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5931 else
5932 convert_move (target, op0,
5933 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5934 return target;
5935
5936 case PLUS_EXPR:
5937 /* We come here from MINUS_EXPR when the second operand is a
5938 constant. */
5939 plus_expr:
5940 this_optab = add_optab;
5941
5942 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5943 something else, make sure we add the register to the constant and
5944 then to the other thing. This case can occur during strength
5945 reduction and doing it this way will produce better code if the
5946 frame pointer or argument pointer is eliminated.
5947
5948 fold-const.c will ensure that the constant is always in the inner
5949 PLUS_EXPR, so the only case we need to do anything about is if
5950 sp, ap, or fp is our second argument, in which case we must swap
5951 the innermost first argument and our second argument. */
5952
5953 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5954 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5955 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5956 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5957 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5958 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5959 {
5960 tree t = TREE_OPERAND (exp, 1);
5961
5962 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5963 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5964 }
5965
5966 /* If the result is to be ptr_mode and we are adding an integer to
5967 something, we might be forming a constant. So try to use
5968 plus_constant. If it produces a sum and we can't accept it,
5969 use force_operand. This allows P = &ARR[const] to generate
5970 efficient code on machines where a SYMBOL_REF is not a valid
5971 address.
5972
5973 If this is an EXPAND_SUM call, always return the sum. */
5974 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5975 || mode == ptr_mode)
5976 {
5977 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5978 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5979 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5980 {
5981 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5982 EXPAND_SUM);
5983 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5984 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5985 op1 = force_operand (op1, target);
5986 return op1;
5987 }
5988
5989 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5990 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5991 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5992 {
5993 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5994 EXPAND_SUM);
5995 if (! CONSTANT_P (op0))
5996 {
5997 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5998 VOIDmode, modifier);
5999 /* Don't go to both_summands if modifier
6000 says it's not right to return a PLUS. */
6001 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6002 goto binop2;
6003 goto both_summands;
6004 }
6005 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6006 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6007 op0 = force_operand (op0, target);
6008 return op0;
6009 }
6010 }
6011
6012 /* No sense saving up arithmetic to be done
6013 if it's all in the wrong mode to form part of an address.
6014 And force_operand won't know whether to sign-extend or
6015 zero-extend. */
6016 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6017 || mode != ptr_mode)
6018 goto binop;
6019
6020 preexpand_calls (exp);
6021 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6022 subtarget = 0;
6023
6024 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
6025 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
6026
6027 both_summands:
6028 /* Make sure any term that's a sum with a constant comes last. */
6029 if (GET_CODE (op0) == PLUS
6030 && CONSTANT_P (XEXP (op0, 1)))
6031 {
6032 temp = op0;
6033 op0 = op1;
6034 op1 = temp;
6035 }
6036 /* If adding to a sum including a constant,
6037 associate it to put the constant outside. */
6038 if (GET_CODE (op1) == PLUS
6039 && CONSTANT_P (XEXP (op1, 1)))
6040 {
6041 rtx constant_term = const0_rtx;
6042
6043 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6044 if (temp != 0)
6045 op0 = temp;
6046 /* Ensure that MULT comes first if there is one. */
6047 else if (GET_CODE (op0) == MULT)
6048 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6049 else
6050 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6051
6052 /* Let's also eliminate constants from op0 if possible. */
6053 op0 = eliminate_constant_term (op0, &constant_term);
6054
6055 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6056 their sum should be a constant. Form it into OP1, since the
6057 result we want will then be OP0 + OP1. */
6058
6059 temp = simplify_binary_operation (PLUS, mode, constant_term,
6060 XEXP (op1, 1));
6061 if (temp != 0)
6062 op1 = temp;
6063 else
6064 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6065 }
6066
6067 /* Put a constant term last and put a multiplication first. */
6068 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6069 temp = op1, op1 = op0, op0 = temp;
6070
6071 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6072 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
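
/* Editorial sketch (not part of expr.c): the re-association done in
   both_summands above.  Constant terms are pulled to the outside of the sum
   so they can later fold into a single address displacement, e.g.
   (x + 8) + (y + 4) is formed as (x + y) + 12.  Illustrative names only. */
static long
reassociated_sum (long x, long y)
{
  long constant_term = 8 + 4;   /* constants combined, as by
                                   simplify_binary_operation above */
  return (x + y) + constant_term;
}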
6073
6074 case MINUS_EXPR:
6075 /* For initializers, we are allowed to return a MINUS of two
6076 symbolic constants. Here we handle all cases when both operands
6077 are constant. */
6078 /* Handle difference of two symbolic constants,
6079 for the sake of an initializer. */
6080 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6081 && really_constant_p (TREE_OPERAND (exp, 0))
6082 && really_constant_p (TREE_OPERAND (exp, 1)))
6083 {
6084 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6085 VOIDmode, modifier);
6086 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6087 VOIDmode, modifier);
6088
6089 /* If the last operand is a CONST_INT, use plus_constant of
6090 the negated constant. Else make the MINUS. */
6091 if (GET_CODE (op1) == CONST_INT)
6092 return plus_constant (op0, - INTVAL (op1));
6093 else
6094 return gen_rtx (MINUS, mode, op0, op1);
6095 }
6096 /* Convert A - const to A + (-const). */
6097 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6098 {
6099 tree negated = fold (build1 (NEGATE_EXPR, type,
6100 TREE_OPERAND (exp, 1)));
6101
6102 /* Deal with the case where we can't negate the constant
6103 in TYPE. */
6104 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6105 {
6106 tree newtype = signed_type (type);
6107 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6108 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6109 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6110
6111 if (! TREE_OVERFLOW (newneg))
6112 return expand_expr (convert (type,
6113 build (PLUS_EXPR, newtype,
6114 newop0, newneg)),
6115 target, tmode, modifier);
6116 }
6117 else
6118 {
6119 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6120 goto plus_expr;
6121 }
6122 }
6123 this_optab = sub_optab;
6124 goto binop;
6125
6126 case MULT_EXPR:
6127 preexpand_calls (exp);
6128 /* If first operand is constant, swap them.
6129 Thus the following special case checks need only
6130 check the second operand. */
6131 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6132 {
6133 register tree t1 = TREE_OPERAND (exp, 0);
6134 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6135 TREE_OPERAND (exp, 1) = t1;
6136 }
6137
6138 /* Attempt to return something suitable for generating an
6139 indexed address, for machines that support that. */
6140
6141 if (modifier == EXPAND_SUM && mode == ptr_mode
6142 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6143 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6144 {
6145 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6146
6147 /* Apply distributive law if OP0 is x+c. */
6148 if (GET_CODE (op0) == PLUS
6149 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6150 return gen_rtx (PLUS, mode,
6151 gen_rtx (MULT, mode, XEXP (op0, 0),
6152 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6153 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6154 * INTVAL (XEXP (op0, 1))));
6155
6156 if (GET_CODE (op0) != REG)
6157 op0 = force_operand (op0, NULL_RTX);
6158 if (GET_CODE (op0) != REG)
6159 op0 = copy_to_mode_reg (mode, op0);
6160
6161 return gen_rtx (MULT, mode, op0,
6162 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6163 }
6164
6165 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6166 subtarget = 0;
6167
6168 /* Check for multiplying things that have been extended
6169 from a narrower type. If this machine supports multiplying
6170 in that narrower type with a result in the desired type,
6171 do it that way, and avoid the explicit type-conversion. */
6172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6173 && TREE_CODE (type) == INTEGER_TYPE
6174 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6175 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6176 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6177 && int_fits_type_p (TREE_OPERAND (exp, 1),
6178 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6179 /* Don't use a widening multiply if a shift will do. */
6180 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6181 > HOST_BITS_PER_WIDE_INT)
6182 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6183 ||
6184 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6185 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6186 ==
6187 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6188 /* If both operands are extended, they must either both
6189 be zero-extended or both be sign-extended. */
6190 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6191 ==
6192 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6193 {
6194 enum machine_mode innermode
6195 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6196 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6197 ? smul_widen_optab : umul_widen_optab);
6198 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6199 ? umul_widen_optab : smul_widen_optab);
6200 if (mode == GET_MODE_WIDER_MODE (innermode))
6201 {
6202 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6203 {
6204 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6205 NULL_RTX, VOIDmode, 0);
6206 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6207 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6208 VOIDmode, 0);
6209 else
6210 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6211 NULL_RTX, VOIDmode, 0);
6212 goto binop2;
6213 }
6214 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6215 && innermode == word_mode)
6216 {
6217 rtx htem;
6218 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6219 NULL_RTX, VOIDmode, 0);
6220 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6221 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6222 VOIDmode, 0);
6223 else
6224 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6225 NULL_RTX, VOIDmode, 0);
6226 temp = expand_binop (mode, other_optab, op0, op1, target,
6227 unsignedp, OPTAB_LIB_WIDEN);
6228 htem = expand_mult_highpart_adjust (innermode,
6229 gen_highpart (innermode, temp),
6230 op0, op1,
6231 gen_highpart (innermode, temp),
6232 unsignedp);
6233 emit_move_insn (gen_highpart (innermode, temp), htem);
6234 return temp;
6235 }
6236 }
6237 }
6238 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6239 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6240 return expand_mult (mode, op0, op1, target, unsignedp);
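
/* Editorial sketch (not part of expr.c): the source pattern the widening-
   multiply check above recognizes.  Both multiplicands are conversions from
   a narrower type (the NOP_EXPRs), so the product can be formed by one
   narrow-by-narrow multiply with a wide result instead of widening each
   operand first.  Assumes 32-bit int and 64-bit long long; the name is
   illustrative only. */
static long long
widening_mult_pattern (int a, int b)
{
  /* Tree shape: MULT_EXPR (NOP_EXPR (a), NOP_EXPR (b)) computed in the
     wider mode; a mulsidi3-style instruction can do this directly. */
  return (long long) a * (long long) b;
}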
6241
6242 case TRUNC_DIV_EXPR:
6243 case FLOOR_DIV_EXPR:
6244 case CEIL_DIV_EXPR:
6245 case ROUND_DIV_EXPR:
6246 case EXACT_DIV_EXPR:
6247 preexpand_calls (exp);
6248 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6249 subtarget = 0;
6250 /* Possible optimization: compute the dividend with EXPAND_SUM
6251 then if the divisor is constant can optimize the case
6252 where some terms of the dividend have coeffs divisible by it. */
6253 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6254 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6255 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6256
6257 case RDIV_EXPR:
6258 this_optab = flodiv_optab;
6259 goto binop;
6260
6261 case TRUNC_MOD_EXPR:
6262 case FLOOR_MOD_EXPR:
6263 case CEIL_MOD_EXPR:
6264 case ROUND_MOD_EXPR:
6265 preexpand_calls (exp);
6266 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6267 subtarget = 0;
6268 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6269 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6270 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6271
6272 case FIX_ROUND_EXPR:
6273 case FIX_FLOOR_EXPR:
6274 case FIX_CEIL_EXPR:
6275 abort (); /* Not used for C. */
6276
6277 case FIX_TRUNC_EXPR:
6278 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6279 if (target == 0)
6280 target = gen_reg_rtx (mode);
6281 expand_fix (target, op0, unsignedp);
6282 return target;
6283
6284 case FLOAT_EXPR:
6285 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6286 if (target == 0)
6287 target = gen_reg_rtx (mode);
6288 /* expand_float can't figure out what to do if FROM has VOIDmode.
6289 So give it the correct mode. With -O, cse will optimize this. */
6290 if (GET_MODE (op0) == VOIDmode)
6291 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6292 op0);
6293 expand_float (target, op0,
6294 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6295 return target;
6296
6297 case NEGATE_EXPR:
6298 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6299 temp = expand_unop (mode, neg_optab, op0, target, 0);
6300 if (temp == 0)
6301 abort ();
6302 return temp;
6303
6304 case ABS_EXPR:
6305 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6306
6307 /* Handle complex values specially. */
6308 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6309 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6310 return expand_complex_abs (mode, op0, target, unsignedp);
6311
6312 /* Unsigned abs is simply the operand. Testing here means we don't
6313 risk generating incorrect code below. */
6314 if (TREE_UNSIGNED (type))
6315 return op0;
6316
6317 return expand_abs (mode, op0, target, unsignedp,
6318 safe_from_p (target, TREE_OPERAND (exp, 0)));
6319
6320 case MAX_EXPR:
6321 case MIN_EXPR:
6322 target = original_target;
6323 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6324 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6325 || GET_MODE (target) != mode
6326 || (GET_CODE (target) == REG
6327 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6328 target = gen_reg_rtx (mode);
6329 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6330 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6331
6332 /* First try to do it with a special MIN or MAX instruction.
6333 If that does not win, use a conditional jump to select the proper
6334 value. */
6335 this_optab = (TREE_UNSIGNED (type)
6336 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6337 : (code == MIN_EXPR ? smin_optab : smax_optab));
6338
6339 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6340 OPTAB_WIDEN);
6341 if (temp != 0)
6342 return temp;
6343
6344 /* At this point, a MEM target is no longer useful; we will get better
6345 code without it. */
6346
6347 if (GET_CODE (target) == MEM)
6348 target = gen_reg_rtx (mode);
6349
6350 if (target != op0)
6351 emit_move_insn (target, op0);
6352
6353 op0 = gen_label_rtx ();
6354
6355 /* If this mode is an integer too wide to compare properly,
6356 compare word by word. Rely on cse to optimize constant cases. */
6357 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6358 {
6359 if (code == MAX_EXPR)
6360 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6361 target, op1, NULL_RTX, op0);
6362 else
6363 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6364 op1, target, NULL_RTX, op0);
6365 emit_move_insn (target, op1);
6366 }
6367 else
6368 {
6369 if (code == MAX_EXPR)
6370 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6371 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6372 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6373 else
6374 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6375 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6376 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6377 if (temp == const0_rtx)
6378 emit_move_insn (target, op1);
6379 else if (temp != const_true_rtx)
6380 {
6381 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6382 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6383 else
6384 abort ();
6385 emit_move_insn (target, op1);
6386 }
6387 }
6388 emit_label (op0);
6389 return target;
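
/* Editorial sketch (not part of expr.c): the conditional-jump fallback used
   above when no min/max instruction wins.  For MAX_EXPR the result starts as
   op0 and is overwritten with op1 only when the "target >= op1" test fails.
   Illustrative names only. */
static int
max_without_minmax_insn (int op0, int op1)
{
  int target = op0;
  if (!(target >= op1))   /* compare_from_rtx + branch around the move */
    target = op1;
  return target;
}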
6390
6391 case BIT_NOT_EXPR:
6392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6393 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6394 if (temp == 0)
6395 abort ();
6396 return temp;
6397
6398 case FFS_EXPR:
6399 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6400 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6401 if (temp == 0)
6402 abort ();
6403 return temp;
6404
6405 /* ??? Can optimize bitwise operations with one arg constant.
6406 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6407 and (a bitwise1 b) bitwise2 b (etc)
6408 but that is probably not worth while. */
6409
6410 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6411 boolean values when we want in all cases to compute both of them. In
6412 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6413 as actual zero-or-1 values and then bitwise anding. In cases where
6414 there cannot be any side effects, better code would be made by
6415 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6416 how to recognize those cases. */
6417
6418 case TRUTH_AND_EXPR:
6419 case BIT_AND_EXPR:
6420 this_optab = and_optab;
6421 goto binop;
6422
6423 case TRUTH_OR_EXPR:
6424 case BIT_IOR_EXPR:
6425 this_optab = ior_optab;
6426 goto binop;
6427
6428 case TRUTH_XOR_EXPR:
6429 case BIT_XOR_EXPR:
6430 this_optab = xor_optab;
6431 goto binop;
6432
6433 case LSHIFT_EXPR:
6434 case RSHIFT_EXPR:
6435 case LROTATE_EXPR:
6436 case RROTATE_EXPR:
6437 preexpand_calls (exp);
6438 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6439 subtarget = 0;
6440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6441 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6442 unsignedp);
6443
6444 /* Could determine the answer when only additive constants differ. Also,
6445 the addition of one can be handled by changing the condition. */
6446 case LT_EXPR:
6447 case LE_EXPR:
6448 case GT_EXPR:
6449 case GE_EXPR:
6450 case EQ_EXPR:
6451 case NE_EXPR:
6452 preexpand_calls (exp);
6453 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6454 if (temp != 0)
6455 return temp;
6456
6457 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6458 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6459 && original_target
6460 && GET_CODE (original_target) == REG
6461 && (GET_MODE (original_target)
6462 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6463 {
6464 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6465 VOIDmode, 0);
6466
6467 if (temp != original_target)
6468 temp = copy_to_reg (temp);
6469
6470 op1 = gen_label_rtx ();
6471 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6472 GET_MODE (temp), unsignedp, 0);
6473 emit_jump_insn (gen_beq (op1));
6474 emit_move_insn (temp, const1_rtx);
6475 emit_label (op1);
6476 return temp;
6477 }
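
/* Editorial sketch (not part of expr.c): the sequence emitted just above for
   "foo != 0" when the value is already in the right register: load foo, and
   only when it is nonzero overwrite it with 1.  Illustrative names only. */
static int
ne_zero_without_store_flag (int foo)
{
  int temp = foo;
  if (temp != 0)     /* the beq above skips this store when temp is zero */
    temp = 1;
  return temp;
}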
6478
6479 /* If no set-flag instruction, must generate a conditional
6480 store into a temporary variable. Drop through
6481 and handle this like && and ||. */
6482
6483 case TRUTH_ANDIF_EXPR:
6484 case TRUTH_ORIF_EXPR:
6485 if (! ignore
6486 && (target == 0 || ! safe_from_p (target, exp)
6487 /* Make sure we don't have a hard reg (such as function's return
6488 value) live across basic blocks, if not optimizing. */
6489 || (!optimize && GET_CODE (target) == REG
6490 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6491 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6492
6493 if (target)
6494 emit_clr_insn (target);
6495
6496 op1 = gen_label_rtx ();
6497 jumpifnot (exp, op1);
6498
6499 if (target)
6500 emit_0_to_1_insn (target);
6501
6502 emit_label (op1);
6503 return ignore ? const0_rtx : target;
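
/* Editorial sketch (not part of expr.c): the conditional-store fallback used
   above when there is no set-flag instruction: clear the result, evaluate
   the condition with jumps, and set the result to 1 only on the path where
   the condition held.  Illustrative names only. */
static int
andif_as_conditional_store (int a, int b)
{
  int target = 0;    /* emit_clr_insn */
  if (a && b)        /* jumpifnot branches over the store when false */
    target = 1;      /* emit_0_to_1_insn */
  return target;
}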
6504
6505 case TRUTH_NOT_EXPR:
6506 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6507 /* The parser is careful to generate TRUTH_NOT_EXPR
6508 only with operands that are always zero or one. */
6509 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6510 target, 1, OPTAB_LIB_WIDEN);
6511 if (temp == 0)
6512 abort ();
6513 return temp;
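
/* Editorial sketch (not part of expr.c): since the operand is guaranteed to
   be 0 or 1, logical negation above is just XOR with 1. */
static int
truth_not_via_xor (int op0)
{
  return op0 ^ 1;   /* 0 -> 1, 1 -> 0 */
}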
6514
6515 case COMPOUND_EXPR:
6516 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6517 emit_queue ();
6518 return expand_expr (TREE_OPERAND (exp, 1),
6519 (ignore ? const0_rtx : target),
6520 VOIDmode, 0);
6521
6522 case COND_EXPR:
6523 /* If we would have a "singleton" (see below) were it not for a
6524 conversion in each arm, bring that conversion back out. */
6525 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6526 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6527 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6528 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6529 {
6530 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6531 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6532
6533 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6534 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6535 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6536 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6537 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6538 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6539 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6540 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6541 return expand_expr (build1 (NOP_EXPR, type,
6542 build (COND_EXPR, TREE_TYPE (true),
6543 TREE_OPERAND (exp, 0),
6544 true, false)),
6545 target, tmode, modifier);
6546 }
6547
6548 {
6549 rtx flag = NULL_RTX;
6550 tree left_cleanups = NULL_TREE;
6551 tree right_cleanups = NULL_TREE;
6552
6553 /* Used to save a pointer to the place to put the setting of
6554 the flag that indicates if this side of the conditional was
6555 taken. We backpatch the code, if we find out later that we
6556 have any conditional cleanups that need to be performed. */
6557 rtx dest_right_flag = NULL_RTX;
6558 rtx dest_left_flag = NULL_RTX;
6559
6560 /* Note that COND_EXPRs whose type is a structure or union
6561 are required to be constructed to contain assignments of
6562 a temporary variable, so that we can evaluate them here
6563 for side effect only. If type is void, we must do likewise. */
6564
6565 /* If an arm of the branch requires a cleanup,
6566 only that cleanup is performed. */
6567
6568 tree singleton = 0;
6569 tree binary_op = 0, unary_op = 0;
6570 tree old_cleanups = cleanups_this_call;
6571
6572 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6573 convert it to our mode, if necessary. */
6574 if (integer_onep (TREE_OPERAND (exp, 1))
6575 && integer_zerop (TREE_OPERAND (exp, 2))
6576 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6577 {
6578 if (ignore)
6579 {
6580 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6581 modifier);
6582 return const0_rtx;
6583 }
6584
6585 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6586 if (GET_MODE (op0) == mode)
6587 return op0;
6588
6589 if (target == 0)
6590 target = gen_reg_rtx (mode);
6591 convert_move (target, op0, unsignedp);
6592 return target;
6593 }
6594
6595 /* Check for X ? A + B : A. If we have this, we can copy A to the
6596 output and conditionally add B. Similarly for unary operations.
6597 Don't do this if X has side-effects because those side effects
6598 might affect A or B and the "?" operation is a sequence point in
6599 ANSI. (operand_equal_p tests for side effects.) */
6600
6601 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6602 && operand_equal_p (TREE_OPERAND (exp, 2),
6603 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6604 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6605 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6606 && operand_equal_p (TREE_OPERAND (exp, 1),
6607 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6608 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6609 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6610 && operand_equal_p (TREE_OPERAND (exp, 2),
6611 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6612 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6613 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6614 && operand_equal_p (TREE_OPERAND (exp, 1),
6615 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6616 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6617
6618 /* If we are not to produce a result, we have no target. Otherwise,
6619 if a target was specified use it; it will not be used as an
6620 intermediate target unless it is safe. If no target, use a
6621 temporary. */
6622
6623 if (ignore)
6624 temp = 0;
6625 else if (original_target
6626 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6627 || (singleton && GET_CODE (original_target) == REG
6628 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6629 && original_target == var_rtx (singleton)))
6630 && GET_MODE (original_target) == mode
6631 && ! (GET_CODE (original_target) == MEM
6632 && MEM_VOLATILE_P (original_target)))
6633 temp = original_target;
6634 else if (TREE_ADDRESSABLE (type))
6635 abort ();
6636 else
6637 temp = assign_temp (type, 0, 0, 1);
6638
6639 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6640 do the test of X as a store-flag operation, do this as
6641 A + ((X != 0) << log C). Similarly for other simple binary
6642 operators. Only do for C == 1 if BRANCH_COST is low. */
6643 if (temp && singleton && binary_op
6644 && (TREE_CODE (binary_op) == PLUS_EXPR
6645 || TREE_CODE (binary_op) == MINUS_EXPR
6646 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6647 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6648 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6649 : integer_onep (TREE_OPERAND (binary_op, 1)))
6650 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6651 {
6652 rtx result;
6653 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6654 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6655 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6656 : xor_optab);
6657
6658 /* If we had X ? A : A + 1, do this as A + (X == 0).
6659
6660 We have to invert the truth value here and then put it
6661 back later if do_store_flag fails. We cannot simply copy
6662 TREE_OPERAND (exp, 0) to another variable and modify that
6663 because invert_truthvalue can modify the tree pointed to
6664 by its argument. */
6665 if (singleton == TREE_OPERAND (exp, 1))
6666 TREE_OPERAND (exp, 0)
6667 = invert_truthvalue (TREE_OPERAND (exp, 0));
6668
6669 result = do_store_flag (TREE_OPERAND (exp, 0),
6670 (safe_from_p (temp, singleton)
6671 ? temp : NULL_RTX),
6672 mode, BRANCH_COST <= 1);
6673
6674 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6675 result = expand_shift (LSHIFT_EXPR, mode, result,
6676 build_int_2 (tree_log2
6677 (TREE_OPERAND
6678 (binary_op, 1)),
6679 0),
6680 (safe_from_p (temp, singleton)
6681 ? temp : NULL_RTX), 0);
6682
6683 if (result)
6684 {
6685 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6686 return expand_binop (mode, boptab, op1, result, temp,
6687 unsignedp, OPTAB_LIB_WIDEN);
6688 }
6689 else if (singleton == TREE_OPERAND (exp, 1))
6690 TREE_OPERAND (exp, 0)
6691 = invert_truthvalue (TREE_OPERAND (exp, 0));
6692 }
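
/* Editorial sketch (not part of expr.c): the branchless form described
   above.  When the addend is a constant power of two and the condition can
   be computed as a store-flag, "x ? a + 4 : a" is computed as
   "a + ((x != 0) << 2)".  Illustrative names only. */
static int
cond_add_pow2 (int x, int a)
{
  return a + ((x != 0) << 2);   /* (x != 0) is the store-flag result */
}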
6693
6694 do_pending_stack_adjust ();
6695 NO_DEFER_POP;
6696 op0 = gen_label_rtx ();
6697
6698 flag = gen_reg_rtx (word_mode);
6699 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6700 {
6701 if (temp != 0)
6702 {
6703 /* If the target conflicts with the other operand of the
6704 binary op, we can't use it. Also, we can't use the target
6705 if it is a hard register, because evaluating the condition
6706 might clobber it. */
6707 if ((binary_op
6708 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6709 || (GET_CODE (temp) == REG
6710 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6711 temp = gen_reg_rtx (mode);
6712 store_expr (singleton, temp, 0);
6713 }
6714 else
6715 expand_expr (singleton,
6716 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6717 dest_left_flag = get_last_insn ();
6718 if (singleton == TREE_OPERAND (exp, 1))
6719 jumpif (TREE_OPERAND (exp, 0), op0);
6720 else
6721 jumpifnot (TREE_OPERAND (exp, 0), op0);
6722
6723 /* Allows cleanups up to here. */
6724 old_cleanups = cleanups_this_call;
6725 if (binary_op && temp == 0)
6726 /* Just touch the other operand. */
6727 expand_expr (TREE_OPERAND (binary_op, 1),
6728 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6729 else if (binary_op)
6730 store_expr (build (TREE_CODE (binary_op), type,
6731 make_tree (type, temp),
6732 TREE_OPERAND (binary_op, 1)),
6733 temp, 0);
6734 else
6735 store_expr (build1 (TREE_CODE (unary_op), type,
6736 make_tree (type, temp)),
6737 temp, 0);
6738 op1 = op0;
6739 dest_right_flag = get_last_insn ();
6740 }
6741 #if 0
6742 /* This is now done in jump.c and is better done there because it
6743 produces shorter register lifetimes. */
6744
6745 /* Check for both possibilities either constants or variables
6746 in registers (but not the same as the target!). If so, can
6747 save branches by assigning one, branching, and assigning the
6748 other. */
6749 else if (temp && GET_MODE (temp) != BLKmode
6750 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6751 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6752 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6753 && DECL_RTL (TREE_OPERAND (exp, 1))
6754 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6755 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6756 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6757 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6758 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6759 && DECL_RTL (TREE_OPERAND (exp, 2))
6760 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6761 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6762 {
6763 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6764 temp = gen_reg_rtx (mode);
6765 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6766 dest_left_flag = get_last_insn ();
6767 jumpifnot (TREE_OPERAND (exp, 0), op0);
6768
6769 /* Allows cleanups up to here. */
6770 old_cleanups = cleanups_this_call;
6771 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6772 op1 = op0;
6773 dest_right_flag = get_last_insn ();
6774 }
6775 #endif
6776 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6777 comparison operator. If we have one of these cases, set the
6778 output to A, branch on A (cse will merge these two references),
6779 then set the output to FOO. */
6780 else if (temp
6781 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6782 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6783 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6784 TREE_OPERAND (exp, 1), 0)
6785 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6786 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6787 {
6788 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6789 temp = gen_reg_rtx (mode);
6790 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6791 dest_left_flag = get_last_insn ();
6792 jumpif (TREE_OPERAND (exp, 0), op0);
6793
6794 /* Allows cleanups up to here. */
6795 old_cleanups = cleanups_this_call;
6796 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6797 op1 = op0;
6798 dest_right_flag = get_last_insn ();
6799 }
6800 else if (temp
6801 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6802 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6803 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6804 TREE_OPERAND (exp, 2), 0)
6805 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6806 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6807 {
6808 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6809 temp = gen_reg_rtx (mode);
6810 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6811 dest_left_flag = get_last_insn ();
6812 jumpifnot (TREE_OPERAND (exp, 0), op0);
6813
6814 /* Allows cleanups up to here. */
6815 old_cleanups = cleanups_this_call;
6816 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6817 op1 = op0;
6818 dest_right_flag = get_last_insn ();
6819 }
6820 else
6821 {
6822 op1 = gen_label_rtx ();
6823 jumpifnot (TREE_OPERAND (exp, 0), op0);
6824
6825 /* Allows cleanups up to here. */
6826 old_cleanups = cleanups_this_call;
6827 if (temp != 0)
6828 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6829 else
6830 expand_expr (TREE_OPERAND (exp, 1),
6831 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6832 dest_left_flag = get_last_insn ();
6833
6834 /* Handle conditional cleanups, if any. */
6835 left_cleanups = defer_cleanups_to (old_cleanups);
6836
6837 emit_queue ();
6838 emit_jump_insn (gen_jump (op1));
6839 emit_barrier ();
6840 emit_label (op0);
6841 if (temp != 0)
6842 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6843 else
6844 expand_expr (TREE_OPERAND (exp, 2),
6845 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6846 dest_right_flag = get_last_insn ();
6847 }
6848
6849 /* Handle conditional cleanups, if any. */
6850 right_cleanups = defer_cleanups_to (old_cleanups);
6851
6852 emit_queue ();
6853 emit_label (op1);
6854 OK_DEFER_POP;
6855
6856 /* Add back in, any conditional cleanups. */
6857 if (left_cleanups || right_cleanups)
6858 {
6859 tree new_cleanups;
6860 tree cond;
6861 rtx last;
6862
6863 /* Now that we know that a flag is needed, go back and add in the
6864 setting of the flag. */
6865
6866 /* Do the left side flag. */
6867 last = get_last_insn ();
6868 /* Flag left cleanups as needed. */
6869 emit_move_insn (flag, const1_rtx);
6870 /* ??? deprecated, use sequences instead. */
6871 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6872
6873 /* Do the right side flag. */
6874 last = get_last_insn ();
6875 /* Flag right cleanups as needed. */
6876 emit_move_insn (flag, const0_rtx);
6877 /* ??? deprecated, use sequences instead. */
6878 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6879
6880 /* All cleanups must be on the function_obstack. */
6881 push_obstacks_nochange ();
6882 resume_temporary_allocation ();
6883
6884 /* convert flag, which is an rtx, into a tree. */
6885 cond = make_node (RTL_EXPR);
6886 TREE_TYPE (cond) = integer_type_node;
6887 RTL_EXPR_RTL (cond) = flag;
6888 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6889 cond = save_expr (cond);
6890
6891 if (! left_cleanups)
6892 left_cleanups = integer_zero_node;
6893 if (! right_cleanups)
6894 right_cleanups = integer_zero_node;
6895 new_cleanups = build (COND_EXPR, void_type_node,
6896 truthvalue_conversion (cond),
6897 left_cleanups, right_cleanups);
6898 new_cleanups = fold (new_cleanups);
6899
6900 pop_obstacks ();
6901
6902 /* Now add in the conditionalized cleanups. */
6903 cleanups_this_call
6904 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6905 expand_eh_region_start ();
6906 }
6907 return temp;
6908 }
6909
6910 case TARGET_EXPR:
6911 {
6912 /* Something needs to be initialized, but we didn't know
6913 where that thing was when building the tree. For example,
6914 it could be the return value of a function, or a parameter
6915 to a function which is laid down on the stack, or a temporary
6916 variable which must be passed by reference.
6917
6918 We guarantee that the expression will either be constructed
6919 or copied into our original target. */
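/* Illustrative sketch, not from the original source: one construct the
   comment above describes is initialization from an aggregate-valued
   call, roughly

       struct S f (void);
       struct S x = f ();

   where the slot that receives the returned S is what operand 0 of the
   TARGET_EXPR names. */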
6920
6921 tree slot = TREE_OPERAND (exp, 0);
6922 tree cleanups = NULL_TREE;
6923 tree exp1;
6924 rtx temp;
6925
6926 if (TREE_CODE (slot) != VAR_DECL)
6927 abort ();
6928
6929 if (! ignore)
6930 target = original_target;
6931
6932 if (target == 0)
6933 {
6934 if (DECL_RTL (slot) != 0)
6935 {
6936 target = DECL_RTL (slot);
6937 /* If we have already expanded the slot, don't do
6938 it again. (mrs) */
6939 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6940 return target;
6941 }
6942 else
6943 {
6944 target = assign_temp (type, 2, 1, 1);
6945 /* All temp slots at this level must not conflict. */
6946 preserve_temp_slots (target);
6947 DECL_RTL (slot) = target;
6948
6949 /* Since SLOT is not known to the called function
6950 to belong to its stack frame, we must build an explicit
6951 cleanup. This case occurs when we must build up a reference
6952 to pass as an argument. In this case,
6953 it is very likely that such a reference need not be
6954 built here. */
6955
6956 if (TREE_OPERAND (exp, 2) == 0)
6957 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6958 cleanups = TREE_OPERAND (exp, 2);
6959 }
6960 }
6961 else
6962 {
6963 /* This case does occur, when expanding a parameter which
6964 needs to be constructed on the stack. The target
6965 is the actual stack address that we want to initialize.
6966 The function we call will perform the cleanup in this case. */
6967
6968 /* If we have already assigned it space, use that space,
6969 not the target that we were passed in, as our target
6970 parameter is only a hint. */
6971 if (DECL_RTL (slot) != 0)
6972 {
6973 target = DECL_RTL (slot);
6974 /* If we have already expanded the slot, don't do
6975 it again. (mrs) */
6976 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6977 return target;
6978 }
6979
6980 DECL_RTL (slot) = target;
6981 }
6982
6983 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6984 /* Mark it as expanded. */
6985 TREE_OPERAND (exp, 1) = NULL_TREE;
6986
6987 store_expr (exp1, target, 0);
6988
6989 if (cleanups)
6990 {
6991 cleanups_this_call = tree_cons (NULL_TREE,
6992 cleanups,
6993 cleanups_this_call);
6994 expand_eh_region_start ();
6995 }
6996
6997 return target;
6998 }
6999
7000 case INIT_EXPR:
7001 {
7002 tree lhs = TREE_OPERAND (exp, 0);
7003 tree rhs = TREE_OPERAND (exp, 1);
7004 tree noncopied_parts = 0;
7005 tree lhs_type = TREE_TYPE (lhs);
7006
7007 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7008 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7009 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7010 TYPE_NONCOPIED_PARTS (lhs_type));
7011 while (noncopied_parts != 0)
7012 {
7013 expand_assignment (TREE_VALUE (noncopied_parts),
7014 TREE_PURPOSE (noncopied_parts), 0, 0);
7015 noncopied_parts = TREE_CHAIN (noncopied_parts);
7016 }
7017 return temp;
7018 }
7019
7020 case MODIFY_EXPR:
7021 {
7022 /* If lhs is complex, expand calls in rhs before computing it.
7023 That's so we don't compute a pointer and save it over a call.
7024 If lhs is simple, compute it first so we can give it as a
7025 target if the rhs is just a call. This avoids an extra temp and copy
7026 and that prevents a partial-subsumption which makes bad code.
7027 Actually we could treat component_ref's of vars like vars. */
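/* Illustrative sketch, not from the original source:

       a[i] = f ();   -- lhs is "complex"; the call in the rhs is
                         expanded first so the address of a[i] is not
                         computed and then kept live across the call.
       x = f ();      -- lhs is a simple VAR_DECL; it is computed first
                         and offered as the target for the call's return
                         value, avoiding an extra temporary and copy.  */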
7028
7029 tree lhs = TREE_OPERAND (exp, 0);
7030 tree rhs = TREE_OPERAND (exp, 1);
7031 tree noncopied_parts = 0;
7032 tree lhs_type = TREE_TYPE (lhs);
7033
7034 temp = 0;
7035
7036 if (TREE_CODE (lhs) != VAR_DECL
7037 && TREE_CODE (lhs) != RESULT_DECL
7038 && TREE_CODE (lhs) != PARM_DECL)
7039 preexpand_calls (exp);
7040
7041 /* Check for |= or &= of a bitfield of size 1 into another bitfield
7042 of size 1. In this case (unless we need the result of the
7043 assignment), we can do this more efficiently with a
7044 test followed by an assignment, if necessary.
7045
7046 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7047 things change so we do, this code should be enhanced to
7048 support it. */
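/* Illustrative sketch, not from the original source, of the case
   handled below:

       struct bits { unsigned int a : 1, b : 1; } x, y;
       ...
       x.a |= y.b;

   When the result of the assignment is not needed, this is expanded as
   a test of y.b that conditionally stores 1 into x.a, instead of
   reading x.a, or-ing, and storing it back. */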
7049 if (ignore
7050 && TREE_CODE (lhs) == COMPONENT_REF
7051 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7052 || TREE_CODE (rhs) == BIT_AND_EXPR)
7053 && TREE_OPERAND (rhs, 0) == lhs
7054 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7055 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7056 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7057 {
7058 rtx label = gen_label_rtx ();
7059
7060 do_jump (TREE_OPERAND (rhs, 1),
7061 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7062 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7063 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7064 (TREE_CODE (rhs) == BIT_IOR_EXPR
7065 ? integer_one_node
7066 : integer_zero_node)),
7067 0, 0);
7068 do_pending_stack_adjust ();
7069 emit_label (label);
7070 return const0_rtx;
7071 }
7072
7073 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7074 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7075 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7076 TYPE_NONCOPIED_PARTS (lhs_type));
7077
7078 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7079 while (noncopied_parts != 0)
7080 {
7081 expand_assignment (TREE_PURPOSE (noncopied_parts),
7082 TREE_VALUE (noncopied_parts), 0, 0);
7083 noncopied_parts = TREE_CHAIN (noncopied_parts);
7084 }
7085 return temp;
7086 }
7087
7088 case PREINCREMENT_EXPR:
7089 case PREDECREMENT_EXPR:
7090 return expand_increment (exp, 0, ignore);
7091
7092 case POSTINCREMENT_EXPR:
7093 case POSTDECREMENT_EXPR:
7094 /* Faster to treat as pre-increment if result is not used. */
7095 return expand_increment (exp, ! ignore, ignore);
7096
7097 case ADDR_EXPR:
7098 /* If nonzero, TEMP will be set to the address of something that might
7099 be a MEM corresponding to a stack slot. */
7100 temp = 0;
7101
7102 /* Are we taking the address of a nested function? */
7103 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7104 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7105 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7106 {
7107 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7108 op0 = force_operand (op0, target);
7109 }
7110 /* If we are taking the address of something erroneous, just
7111 return a zero. */
7112 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7113 return const0_rtx;
7114 else
7115 {
7116 /* We make sure to pass const0_rtx down if we came in with
7117 ignore set, to avoid doing the cleanups twice for something. */
7118 op0 = expand_expr (TREE_OPERAND (exp, 0),
7119 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7120 (modifier == EXPAND_INITIALIZER
7121 ? modifier : EXPAND_CONST_ADDRESS));
7122
7123 /* If we are going to ignore the result, OP0 will have been set
7124 to const0_rtx, so just return it. Don't get confused and
7125 think we are taking the address of the constant. */
7126 if (ignore)
7127 return op0;
7128
7129 op0 = protect_from_queue (op0, 0);
7130
7131 /* We would like the object in memory. If it is a constant,
7132 we can have it be statically allocated into memory. For
7133 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7134 memory and store the value into it. */
7135
7136 if (CONSTANT_P (op0))
7137 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7138 op0);
7139 else if (GET_CODE (op0) == MEM)
7140 {
7141 mark_temp_addr_taken (op0);
7142 temp = XEXP (op0, 0);
7143 }
7144
7145 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7146 || GET_CODE (op0) == CONCAT)
7147 {
7148 /* If this object is in a register, it must not
7149 be BLKmode. */
7150 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7151 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7152
7153 mark_temp_addr_taken (memloc);
7154 emit_move_insn (memloc, op0);
7155 op0 = memloc;
7156 }
7157
7158 if (GET_CODE (op0) != MEM)
7159 abort ();
7160
7161 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7162 {
7163 temp = XEXP (op0, 0);
7164 #ifdef POINTERS_EXTEND_UNSIGNED
7165 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7166 && mode == ptr_mode)
7167 temp = convert_memory_address (ptr_mode, temp);
7168 #endif
7169 return temp;
7170 }
7171
7172 op0 = force_operand (XEXP (op0, 0), target);
7173 }
7174
7175 if (flag_force_addr && GET_CODE (op0) != REG)
7176 op0 = force_reg (Pmode, op0);
7177
7178 if (GET_CODE (op0) == REG
7179 && ! REG_USERVAR_P (op0))
7180 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7181
7182 /* If we might have had a temp slot, add an equivalent address
7183 for it. */
7184 if (temp != 0)
7185 update_temp_slot_address (temp, op0);
7186
7187 #ifdef POINTERS_EXTEND_UNSIGNED
7188 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7189 && mode == ptr_mode)
7190 op0 = convert_memory_address (ptr_mode, op0);
7191 #endif
7192
7193 return op0;
7194
7195 case ENTRY_VALUE_EXPR:
7196 abort ();
7197
7198 /* COMPLEX type for Extended Pascal & Fortran */
7199 case COMPLEX_EXPR:
7200 {
7201 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7202 rtx insns;
7203
7204 /* Get the rtx code of the operands. */
7205 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7206 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7207
7208 if (! target)
7209 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7210
7211 start_sequence ();
7212
7213 /* Move the real (op0) and imaginary (op1) parts to their location. */
7214 emit_move_insn (gen_realpart (mode, target), op0);
7215 emit_move_insn (gen_imagpart (mode, target), op1);
7216
7217 insns = get_insns ();
7218 end_sequence ();
7219
7220 /* Complex construction should appear as a single unit. */
7221 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7222 each with a separate pseudo as destination.
7223 It's not correct for flow to treat them as a unit. */
7224 if (GET_CODE (target) != CONCAT)
7225 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7226 else
7227 emit_insns (insns);
7228
7229 return target;
7230 }
7231
7232 case REALPART_EXPR:
7233 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7234 return gen_realpart (mode, op0);
7235
7236 case IMAGPART_EXPR:
7237 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7238 return gen_imagpart (mode, op0);
7239
7240 case CONJ_EXPR:
7241 {
7242 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7243 rtx imag_t;
7244 rtx insns;
7245
7246 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7247
7248 if (! target)
7249 target = gen_reg_rtx (mode);
7250
7251 start_sequence ();
7252
7253 /* Store the realpart and the negated imagpart to target. */
7254 emit_move_insn (gen_realpart (partmode, target),
7255 gen_realpart (partmode, op0));
7256
7257 imag_t = gen_imagpart (partmode, target);
7258 temp = expand_unop (partmode, neg_optab,
7259 gen_imagpart (partmode, op0), imag_t, 0);
7260 if (temp != imag_t)
7261 emit_move_insn (imag_t, temp);
7262
7263 insns = get_insns ();
7264 end_sequence ();
7265
7266 /* Conjugate should appear as a single unit.
7267 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7268 each with a separate pseudo as destination.
7269 It's not correct for flow to treat them as a unit. */
7270 if (GET_CODE (target) != CONCAT)
7271 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7272 else
7273 emit_insns (insns);
7274
7275 return target;
7276 }
7277
7278 case ERROR_MARK:
7279 op0 = CONST0_RTX (tmode);
7280 if (op0 != 0)
7281 return op0;
7282 return const0_rtx;
7283
7284 default:
7285 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7286 }
7287
7288 /* Here to do an ordinary binary operator, generating an instruction
7289 from the optab already placed in `this_optab'. */
7290 binop:
7291 preexpand_calls (exp);
7292 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7293 subtarget = 0;
7294 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7295 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7296 binop2:
7297 temp = expand_binop (mode, this_optab, op0, op1, target,
7298 unsignedp, OPTAB_LIB_WIDEN);
7299 if (temp == 0)
7300 abort ();
7301 return temp;
7302 }
7303
7304
7305 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7306
7307 void
7308 bc_expand_expr (exp)
7309 tree exp;
7310 {
7311 enum tree_code code;
7312 tree type, arg0;
7313 rtx r;
7314 struct binary_operator *binoptab;
7315 struct unary_operator *unoptab;
7316 struct increment_operator *incroptab;
7317 struct bc_label *lab, *lab1;
7318 enum bytecode_opcode opcode;
7319
7320
7321 code = TREE_CODE (exp);
7322
7323 switch (code)
7324 {
7325 case PARM_DECL:
7326
7327 if (DECL_RTL (exp) == 0)
7328 {
7329 error_with_decl (exp, "prior parameter's size depends on `%s'");
7330 return;
7331 }
7332
7333 bc_load_parmaddr (DECL_RTL (exp));
7334 bc_load_memory (TREE_TYPE (exp), exp);
7335
7336 return;
7337
7338 case VAR_DECL:
7339
7340 if (DECL_RTL (exp) == 0)
7341 abort ();
7342
7343 #if 0
7344 if (BYTECODE_LABEL (DECL_RTL (exp)))
7345 bc_load_externaddr (DECL_RTL (exp));
7346 else
7347 bc_load_localaddr (DECL_RTL (exp));
7348 #endif
7349 if (TREE_PUBLIC (exp))
7350 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7351 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7352 else
7353 bc_load_localaddr (DECL_RTL (exp));
7354
7355 bc_load_memory (TREE_TYPE (exp), exp);
7356 return;
7357
7358 case INTEGER_CST:
7359
7360 #ifdef DEBUG_PRINT_CODE
7361 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7362 #endif
7363 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7364 ? SImode
7365 : TYPE_MODE (TREE_TYPE (exp)))],
7366 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7367 return;
7368
7369 case REAL_CST:
7370
7371 #if 0
7372 #ifdef DEBUG_PRINT_CODE
7373 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7374 #endif
7375 /* FIX THIS: find a better way to pass real_cst's. -bson */
7376 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7377 (double) TREE_REAL_CST (exp));
7378 #else
7379 abort ();
7380 #endif
7381
7382 return;
7383
7384 case CALL_EXPR:
7385
7386 /* We build a call description vector describing the type of
7387 the return value and of the arguments; this call vector,
7388 together with a pointer to a location for the return value
7389 and the base of the argument list, is passed to the low
7390 level machine dependent call subroutine, which is responsible
7391 for putting the arguments wherever real functions expect
7392 them, as well as getting the return value back. */
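/* Sketch of the resulting layout, as built by the code below (the type
   codes and sizes come from bc_runtime_type_code and size_in_bytes
   respectively):

       calldesc = { nargs,
                    return type code, return size,
                    arg1 type code, arg1 size,
                    ...
                    argN type code, argN size }  */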
7393 {
7394 tree calldesc = 0, arg;
7395 int nargs = 0, i;
7396 rtx retval;
7397
7398 /* Push the evaluated args on the evaluation stack in reverse
7399 order. Also make an entry for each arg in the calldesc
7400 vector while we're at it. */
7401
7402 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7403
7404 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7405 {
7406 ++nargs;
7407 bc_expand_expr (TREE_VALUE (arg));
7408
7409 calldesc = tree_cons ((tree) 0,
7410 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7411 calldesc);
7412 calldesc = tree_cons ((tree) 0,
7413 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7414 calldesc);
7415 }
7416
7417 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7418
7419 /* Allocate a location for the return value and push its
7420 address on the evaluation stack. Also make an entry
7421 at the front of the calldesc for the return value type. */
7422
7423 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7424 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7425 bc_load_localaddr (retval);
7426
7427 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7428 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7429
7430 /* Prepend the argument count. */
7431 calldesc = tree_cons ((tree) 0,
7432 build_int_2 (nargs, 0),
7433 calldesc);
7434
7435 /* Push the address of the call description vector on the stack. */
7436 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7437 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7438 build_index_type (build_int_2 (nargs * 2, 0)));
7439 r = output_constant_def (calldesc);
7440 bc_load_externaddr (r);
7441
7442 /* Push the address of the function to be called. */
7443 bc_expand_expr (TREE_OPERAND (exp, 0));
7444
7445 /* Call the function, popping its address and the calldesc vector
7446 address off the evaluation stack in the process. */
7447 bc_emit_instruction (call);
7448
7449 /* Pop the arguments off the stack. */
7450 bc_adjust_stack (nargs);
7451
7452 /* Load the return value onto the stack. */
7453 bc_load_localaddr (retval);
7454 bc_load_memory (type, TREE_OPERAND (exp, 0));
7455 }
7456 return;
7457
7458 case SAVE_EXPR:
7459
7460 if (!SAVE_EXPR_RTL (exp))
7461 {
7462 /* First time around: copy to local variable */
7463 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7464 TYPE_ALIGN (TREE_TYPE(exp)));
7465 bc_expand_expr (TREE_OPERAND (exp, 0));
7466 bc_emit_instruction (duplicate);
7467
7468 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7469 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7470 }
7471 else
7472 {
7473 /* Consecutive reference: use saved copy */
7474 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7475 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7476 }
7477 return;
7478
7479 #if 0
7480 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7481 how are they handled instead? */
7482 case LET_STMT:
7483
7484 TREE_USED (exp) = 1;
7485 bc_expand_expr (STMT_BODY (exp));
7486 return;
7487 #endif
7488
7489 case NOP_EXPR:
7490 case CONVERT_EXPR:
7491
7492 bc_expand_expr (TREE_OPERAND (exp, 0));
7493 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7494 return;
7495
7496 case MODIFY_EXPR:
7497
7498 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7499 return;
7500
7501 case ADDR_EXPR:
7502
7503 bc_expand_address (TREE_OPERAND (exp, 0));
7504 return;
7505
7506 case INDIRECT_REF:
7507
7508 bc_expand_expr (TREE_OPERAND (exp, 0));
7509 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7510 return;
7511
7512 case ARRAY_REF:
7513
7514 bc_expand_expr (bc_canonicalize_array_ref (exp));
7515 return;
7516
7517 case COMPONENT_REF:
7518
7519 bc_expand_component_address (exp);
7520
7521 /* If we have a bitfield, generate a proper load */
7522 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7523 return;
7524
7525 case COMPOUND_EXPR:
7526
7527 bc_expand_expr (TREE_OPERAND (exp, 0));
7528 bc_emit_instruction (drop);
7529 bc_expand_expr (TREE_OPERAND (exp, 1));
7530 return;
7531
7532 case COND_EXPR:
7533
7534 bc_expand_expr (TREE_OPERAND (exp, 0));
7535 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7536 lab = bc_get_bytecode_label ();
7537 bc_emit_bytecode (xjumpifnot);
7538 bc_emit_bytecode_labelref (lab);
7539
7540 #ifdef DEBUG_PRINT_CODE
7541 fputc ('\n', stderr);
7542 #endif
7543 bc_expand_expr (TREE_OPERAND (exp, 1));
7544 lab1 = bc_get_bytecode_label ();
7545 bc_emit_bytecode (jump);
7546 bc_emit_bytecode_labelref (lab1);
7547
7548 #ifdef DEBUG_PRINT_CODE
7549 fputc ('\n', stderr);
7550 #endif
7551
7552 bc_emit_bytecode_labeldef (lab);
7553 bc_expand_expr (TREE_OPERAND (exp, 2));
7554 bc_emit_bytecode_labeldef (lab1);
7555 return;
7556
7557 case TRUTH_ANDIF_EXPR:
7558
7559 opcode = xjumpifnot;
7560 goto andorif;
7561
7562 case TRUTH_ORIF_EXPR:
7563
7564 opcode = xjumpif;
7565 goto andorif;
7566
7567 case PLUS_EXPR:
7568
7569 binoptab = optab_plus_expr;
7570 goto binop;
7571
7572 case MINUS_EXPR:
7573
7574 binoptab = optab_minus_expr;
7575 goto binop;
7576
7577 case MULT_EXPR:
7578
7579 binoptab = optab_mult_expr;
7580 goto binop;
7581
7582 case TRUNC_DIV_EXPR:
7583 case FLOOR_DIV_EXPR:
7584 case CEIL_DIV_EXPR:
7585 case ROUND_DIV_EXPR:
7586 case EXACT_DIV_EXPR:
7587
7588 binoptab = optab_trunc_div_expr;
7589 goto binop;
7590
7591 case TRUNC_MOD_EXPR:
7592 case FLOOR_MOD_EXPR:
7593 case CEIL_MOD_EXPR:
7594 case ROUND_MOD_EXPR:
7595
7596 binoptab = optab_trunc_mod_expr;
7597 goto binop;
7598
7599 case FIX_ROUND_EXPR:
7600 case FIX_FLOOR_EXPR:
7601 case FIX_CEIL_EXPR:
7602 abort (); /* Not used for C. */
7603
7604 case FIX_TRUNC_EXPR:
7605 case FLOAT_EXPR:
7606 case MAX_EXPR:
7607 case MIN_EXPR:
7608 case FFS_EXPR:
7609 case LROTATE_EXPR:
7610 case RROTATE_EXPR:
7611 abort (); /* FIXME */
7612
7613 case RDIV_EXPR:
7614
7615 binoptab = optab_rdiv_expr;
7616 goto binop;
7617
7618 case BIT_AND_EXPR:
7619
7620 binoptab = optab_bit_and_expr;
7621 goto binop;
7622
7623 case BIT_IOR_EXPR:
7624
7625 binoptab = optab_bit_ior_expr;
7626 goto binop;
7627
7628 case BIT_XOR_EXPR:
7629
7630 binoptab = optab_bit_xor_expr;
7631 goto binop;
7632
7633 case LSHIFT_EXPR:
7634
7635 binoptab = optab_lshift_expr;
7636 goto binop;
7637
7638 case RSHIFT_EXPR:
7639
7640 binoptab = optab_rshift_expr;
7641 goto binop;
7642
7643 case TRUTH_AND_EXPR:
7644
7645 binoptab = optab_truth_and_expr;
7646 goto binop;
7647
7648 case TRUTH_OR_EXPR:
7649
7650 binoptab = optab_truth_or_expr;
7651 goto binop;
7652
7653 case LT_EXPR:
7654
7655 binoptab = optab_lt_expr;
7656 goto binop;
7657
7658 case LE_EXPR:
7659
7660 binoptab = optab_le_expr;
7661 goto binop;
7662
7663 case GE_EXPR:
7664
7665 binoptab = optab_ge_expr;
7666 goto binop;
7667
7668 case GT_EXPR:
7669
7670 binoptab = optab_gt_expr;
7671 goto binop;
7672
7673 case EQ_EXPR:
7674
7675 binoptab = optab_eq_expr;
7676 goto binop;
7677
7678 case NE_EXPR:
7679
7680 binoptab = optab_ne_expr;
7681 goto binop;
7682
7683 case NEGATE_EXPR:
7684
7685 unoptab = optab_negate_expr;
7686 goto unop;
7687
7688 case BIT_NOT_EXPR:
7689
7690 unoptab = optab_bit_not_expr;
7691 goto unop;
7692
7693 case TRUTH_NOT_EXPR:
7694
7695 unoptab = optab_truth_not_expr;
7696 goto unop;
7697
7698 case PREDECREMENT_EXPR:
7699
7700 incroptab = optab_predecrement_expr;
7701 goto increment;
7702
7703 case PREINCREMENT_EXPR:
7704
7705 incroptab = optab_preincrement_expr;
7706 goto increment;
7707
7708 case POSTDECREMENT_EXPR:
7709
7710 incroptab = optab_postdecrement_expr;
7711 goto increment;
7712
7713 case POSTINCREMENT_EXPR:
7714
7715 incroptab = optab_postincrement_expr;
7716 goto increment;
7717
7718 case CONSTRUCTOR:
7719
7720 bc_expand_constructor (exp);
7721 return;
7722
7723 case ERROR_MARK:
7724 case RTL_EXPR:
7725
7726 return;
7727
7728 case BIND_EXPR:
7729 {
7730 tree vars = TREE_OPERAND (exp, 0);
7731 int vars_need_expansion = 0;
7732
7733 /* Need to open a binding contour here because,
7734 if there are any cleanups, they must be contained here. */
7735 expand_start_bindings (0);
7736
7737 /* Mark the corresponding BLOCK for output. */
7738 if (TREE_OPERAND (exp, 2) != 0)
7739 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7740
7741 /* If VARS have not yet been expanded, expand them now. */
7742 while (vars)
7743 {
7744 if (DECL_RTL (vars) == 0)
7745 {
7746 vars_need_expansion = 1;
7747 expand_decl (vars);
7748 }
7749 expand_decl_init (vars);
7750 vars = TREE_CHAIN (vars);
7751 }
7752
7753 bc_expand_expr (TREE_OPERAND (exp, 1));
7754
7755 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7756
7757 return;
7758 }
7759 }
7760
7761 abort ();
7762
7763 binop:
7764
7765 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7766 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7767 return;
7768
7769
7770 unop:
7771
7772 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7773 return;
7774
7775
7776 andorif:
7777
7778 bc_expand_expr (TREE_OPERAND (exp, 0));
7779 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7780 lab = bc_get_bytecode_label ();
7781
7782 bc_emit_instruction (duplicate);
7783 bc_emit_bytecode (opcode);
7784 bc_emit_bytecode_labelref (lab);
7785
7786 #ifdef DEBUG_PRINT_CODE
7787 fputc ('\n', stderr);
7788 #endif
7789
7790 bc_emit_instruction (drop);
7791
7792 bc_expand_expr (TREE_OPERAND (exp, 1));
7793 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7794 bc_emit_bytecode_labeldef (lab);
7795 return;
7796
7797
7798 increment:
7799
7800 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7801
7802 /* Push the quantum. */
7803 bc_expand_expr (TREE_OPERAND (exp, 1));
7804
7805 /* Convert it to the lvalue's type. */
7806 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7807
7808 /* Push the address of the lvalue */
7809 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7810
7811 /* Perform actual increment */
7812 bc_expand_increment (incroptab, type);
7813 return;
7814 }
7815 \f
7816 /* Return the alignment in bits of EXP, a pointer valued expression.
7817 But don't return more than MAX_ALIGN no matter what.
7818 The alignment returned is, by default, the alignment of the thing that
7819 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7820
7821 Otherwise, look at the expression to see if we can do better, i.e., if the
7822 expression is actually pointing at an object whose alignment is tighter. */
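/* Illustrative sketch, not from the original source: for something like
   (char *) &i, where i is an int, the pointer type alone only promises
   char alignment, but the ADDR_EXPR case below finds the DECL_ALIGN of
   i and so can report int alignment instead. */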
7823
7824 static int
7825 get_pointer_alignment (exp, max_align)
7826 tree exp;
7827 unsigned max_align;
7828 {
7829 unsigned align, inner;
7830
7831 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7832 return 0;
7833
7834 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7835 align = MIN (align, max_align);
7836
7837 while (1)
7838 {
7839 switch (TREE_CODE (exp))
7840 {
7841 case NOP_EXPR:
7842 case CONVERT_EXPR:
7843 case NON_LVALUE_EXPR:
7844 exp = TREE_OPERAND (exp, 0);
7845 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7846 return align;
7847 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7848 align = MIN (inner, max_align);
7849 break;
7850
7851 case PLUS_EXPR:
7852 /* If sum of pointer + int, restrict our maximum alignment to that
7853 imposed by the integer. If not, we can't do any better than
7854 ALIGN. */
7855 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7856 return align;
7857
7858 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7859 & (max_align - 1))
7860 != 0)
7861 max_align >>= 1;
7862
7863 exp = TREE_OPERAND (exp, 0);
7864 break;
7865
7866 case ADDR_EXPR:
7867 /* See what we are pointing at and look at its alignment. */
7868 exp = TREE_OPERAND (exp, 0);
7869 if (TREE_CODE (exp) == FUNCTION_DECL)
7870 align = FUNCTION_BOUNDARY;
7871 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7872 align = DECL_ALIGN (exp);
7873 #ifdef CONSTANT_ALIGNMENT
7874 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7875 align = CONSTANT_ALIGNMENT (exp, align);
7876 #endif
7877 return MIN (align, max_align);
7878
7879 default:
7880 return align;
7881 }
7882 }
7883 }
7884 \f
7885 /* Return the tree node and offset if a given argument corresponds to
7886 a string constant. */
7887
7888 static tree
7889 string_constant (arg, ptr_offset)
7890 tree arg;
7891 tree *ptr_offset;
7892 {
7893 STRIP_NOPS (arg);
7894
7895 if (TREE_CODE (arg) == ADDR_EXPR
7896 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7897 {
7898 *ptr_offset = integer_zero_node;
7899 return TREE_OPERAND (arg, 0);
7900 }
7901 else if (TREE_CODE (arg) == PLUS_EXPR)
7902 {
7903 tree arg0 = TREE_OPERAND (arg, 0);
7904 tree arg1 = TREE_OPERAND (arg, 1);
7905
7906 STRIP_NOPS (arg0);
7907 STRIP_NOPS (arg1);
7908
7909 if (TREE_CODE (arg0) == ADDR_EXPR
7910 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7911 {
7912 *ptr_offset = arg1;
7913 return TREE_OPERAND (arg0, 0);
7914 }
7915 else if (TREE_CODE (arg1) == ADDR_EXPR
7916 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7917 {
7918 *ptr_offset = arg0;
7919 return TREE_OPERAND (arg1, 0);
7920 }
7921 }
7922
7923 return 0;
7924 }
7925
7926 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7927 way, because the string could contain a zero byte in the middle.
7928 TREE_STRING_LENGTH is the size of the character array, not the string.
7929
7930 Unfortunately, string_constant can't access the values of const char
7931 arrays with initializers, so neither can we do so here. */
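/* For example (illustrative only): for the constant "hello\0world",
   TREE_STRING_LENGTH is 12 (eleven characters plus the appended
   terminating null), while the C string length computed here is 5. */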
7932
7933 static tree
7934 c_strlen (src)
7935 tree src;
7936 {
7937 tree offset_node;
7938 int offset, max;
7939 char *ptr;
7940
7941 src = string_constant (src, &offset_node);
7942 if (src == 0)
7943 return 0;
7944 max = TREE_STRING_LENGTH (src);
7945 ptr = TREE_STRING_POINTER (src);
7946 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7947 {
7948 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7949 compute the offset to the following null if we don't know where to
7950 start searching for it. */
7951 int i;
7952 for (i = 0; i < max; i++)
7953 if (ptr[i] == 0)
7954 return 0;
7955 /* We don't know the starting offset, but we do know that the string
7956 has no internal zero bytes. We can assume that the offset falls
7957 within the bounds of the string; otherwise, the programmer deserves
7958 what he gets. Subtract the offset from the length of the string,
7959 and return that. */
7960 /* This would perhaps not be valid if we were dealing with named
7961 arrays in addition to literal string constants. */
7962 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7963 }
7964
7965 /* We have a known offset into the string. Start searching there for
7966 a null character. */
7967 if (offset_node == 0)
7968 offset = 0;
7969 else
7970 {
7971 /* Did we get a long long offset? If so, punt. */
7972 if (TREE_INT_CST_HIGH (offset_node) != 0)
7973 return 0;
7974 offset = TREE_INT_CST_LOW (offset_node);
7975 }
7976 /* If the offset is known to be out of bounds, warn, and call strlen at
7977 runtime. */
7978 if (offset < 0 || offset > max)
7979 {
7980 warning ("offset outside bounds of constant string");
7981 return 0;
7982 }
7983 /* Use strlen to search for the first zero byte. Since any strings
7984 constructed with build_string will have nulls appended, we win even
7985 if we get handed something like (char[4])"abcd".
7986
7987 Since OFFSET is our starting index into the string, no further
7988 calculation is needed. */
7989 return size_int (strlen (ptr + offset));
7990 }
7991
7992 rtx
7993 expand_builtin_return_addr (fndecl_code, count, tem)
7994 enum built_in_function fndecl_code;
7995 int count;
7996 rtx tem;
7997 {
7998 int i;
7999
8000 /* Some machines need special handling before we can access
8001 arbitrary frames. For example, on the sparc, we must first flush
8002 all register windows to the stack. */
8003 #ifdef SETUP_FRAME_ADDRESSES
8004 SETUP_FRAME_ADDRESSES ();
8005 #endif
8006
8007 /* On the sparc, the return address is not in the frame, it is in a
8008 register. There is no way to access it off of the current frame
8009 pointer, but it can be accessed off the previous frame pointer by
8010 reading the value from the register window save area. */
8011 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8012 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8013 count--;
8014 #endif
8015
8016 /* Scan back COUNT frames to the specified frame. */
8017 for (i = 0; i < count; i++)
8018 {
8019 /* Assume the dynamic chain pointer is in the word that the
8020 frame address points to, unless otherwise specified. */
8021 #ifdef DYNAMIC_CHAIN_ADDRESS
8022 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8023 #endif
8024 tem = memory_address (Pmode, tem);
8025 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8026 }
8027
8028 /* For __builtin_frame_address, return what we've got. */
8029 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8030 return tem;
8031
8032 /* For __builtin_return_address, get the return address from that
8033 frame. */
8034 #ifdef RETURN_ADDR_RTX
8035 tem = RETURN_ADDR_RTX (count, tem);
8036 #else
8037 tem = memory_address (Pmode,
8038 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8039 tem = gen_rtx (MEM, Pmode, tem);
8040 #endif
8041 return tem;
8042 }
8043 \f
8044 /* Expand an expression EXP that calls a built-in function,
8045 with result going to TARGET if that's convenient
8046 (and in mode MODE if that's convenient).
8047 SUBTARGET may be used as the target for computing one of EXP's operands.
8048 IGNORE is nonzero if the value is to be ignored. */
8049
8050 #define CALLED_AS_BUILT_IN(NODE) \
8051 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8052
8053 static rtx
8054 expand_builtin (exp, target, subtarget, mode, ignore)
8055 tree exp;
8056 rtx target;
8057 rtx subtarget;
8058 enum machine_mode mode;
8059 int ignore;
8060 {
8061 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8062 tree arglist = TREE_OPERAND (exp, 1);
8063 rtx op0;
8064 rtx lab1, insns;
8065 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8066 optab builtin_optab;
8067
8068 switch (DECL_FUNCTION_CODE (fndecl))
8069 {
8070 case BUILT_IN_ABS:
8071 case BUILT_IN_LABS:
8072 case BUILT_IN_FABS:
8073 /* build_function_call changes these into ABS_EXPR. */
8074 abort ();
8075
8076 case BUILT_IN_SIN:
8077 case BUILT_IN_COS:
8078 /* Treat these like sqrt, but only if the user asks for them. */
8079 if (! flag_fast_math)
8080 break;
8081 case BUILT_IN_FSQRT:
8082 /* If not optimizing, call the library function. */
8083 if (! optimize)
8084 break;
8085
8086 if (arglist == 0
8087 /* Arg could be wrong type if user redeclared this fcn wrong. */
8088 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8089 break;
8090
8091 /* Stabilize and compute the argument. */
8092 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8093 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8094 {
8095 exp = copy_node (exp);
8096 arglist = copy_node (arglist);
8097 TREE_OPERAND (exp, 1) = arglist;
8098 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8099 }
8100 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8101
8102 /* Make a suitable register to place result in. */
8103 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8104
8105 emit_queue ();
8106 start_sequence ();
8107
8108 switch (DECL_FUNCTION_CODE (fndecl))
8109 {
8110 case BUILT_IN_SIN:
8111 builtin_optab = sin_optab; break;
8112 case BUILT_IN_COS:
8113 builtin_optab = cos_optab; break;
8114 case BUILT_IN_FSQRT:
8115 builtin_optab = sqrt_optab; break;
8116 default:
8117 abort ();
8118 }
8119
8120 /* Compute into TARGET.
8121 Set TARGET to wherever the result comes back. */
8122 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8123 builtin_optab, op0, target, 0);
8124
8125 /* If we were unable to expand via the builtin, stop the
8126 sequence (without outputting the insns) and break, causing
8127 a call to the library function. */
8128 if (target == 0)
8129 {
8130 end_sequence ();
8131 break;
8132 }
8133
8134 /* Check the results by default. But if flag_fast_math is turned on,
8135 then assume sqrt will always be called with valid arguments. */
8136
8137 if (! flag_fast_math)
8138 {
8139 /* Don't define the builtin FP instructions
8140 if your machine is not IEEE. */
8141 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8142 abort ();
8143
8144 lab1 = gen_label_rtx ();
8145
8146 /* Test the result; if it is NaN, set errno=EDOM because
8147 the argument was not in the domain. */
8148 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8149 emit_jump_insn (gen_beq (lab1));
8150
8151 #ifdef TARGET_EDOM
8152 {
8153 #ifdef GEN_ERRNO_RTX
8154 rtx errno_rtx = GEN_ERRNO_RTX;
8155 #else
8156 rtx errno_rtx
8157 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8158 #endif
8159
8160 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8161 }
8162 #else
8163 /* We can't set errno=EDOM directly; let the library call do it.
8164 Pop the arguments right away in case the call gets deleted. */
8165 NO_DEFER_POP;
8166 expand_call (exp, target, 0);
8167 OK_DEFER_POP;
8168 #endif
8169
8170 emit_label (lab1);
8171 }
8172
8173 /* Output the entire sequence. */
8174 insns = get_insns ();
8175 end_sequence ();
8176 emit_insns (insns);
8177
8178 return target;
8179
8180 /* __builtin_apply_args returns block of memory allocated on
8181 the stack into which is stored the arg pointer, structure
8182 value address, static chain, and all the registers that might
8183 possibly be used in performing a function call. The code is
8184 moved to the start of the function so the incoming values are
8185 saved. */
8186 case BUILT_IN_APPLY_ARGS:
8187 /* Don't do __builtin_apply_args more than once in a function.
8188 Save the result of the first call and reuse it. */
8189 if (apply_args_value != 0)
8190 return apply_args_value;
8191 {
8192 /* When this function is called, it means that registers must be
8193 saved on entry to this function. So we migrate the
8194 call to the first insn of this function. */
8195 rtx temp;
8196 rtx seq;
8197
8198 start_sequence ();
8199 temp = expand_builtin_apply_args ();
8200 seq = get_insns ();
8201 end_sequence ();
8202
8203 apply_args_value = temp;
8204
8205 /* Put the sequence after the NOTE that starts the function.
8206 If this is inside a SEQUENCE, make the outer-level insn
8207 chain current, so the code is placed at the start of the
8208 function. */
8209 push_topmost_sequence ();
8210 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8211 pop_topmost_sequence ();
8212 return temp;
8213 }
8214
8215 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8216 FUNCTION with a copy of the parameters described by
8217 ARGUMENTS, and ARGSIZE. It returns a block of memory
8218 allocated on the stack into which is stored all the registers
8219 that might possibly be used for returning the result of a
8220 function. ARGUMENTS is the value returned by
8221 __builtin_apply_args. ARGSIZE is the number of bytes of
8222 arguments that must be copied. ??? How should this value be
8223 computed? We'll also need a safe worst case value for varargs
8224 functions. */
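/* Illustrative sketch, not from the original source, of how these three
   builtins combine in a forwarding function; OTHER_FN and the 64-byte
   argument-block size are made-up placeholders (see the question about
   ARGSIZE above):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) other_fn, args, 64);
       __builtin_return (result);
*/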
8225 case BUILT_IN_APPLY:
8226 if (arglist == 0
8227 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8228 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8229 || TREE_CHAIN (arglist) == 0
8230 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8231 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8232 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8233 return const0_rtx;
8234 else
8235 {
8236 int i;
8237 tree t;
8238 rtx ops[3];
8239
8240 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8241 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8242
8243 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8244 }
8245
8246 /* __builtin_return (RESULT) causes the function to return the
8247 value described by RESULT. RESULT is address of the block of
8248 memory returned by __builtin_apply. */
8249 case BUILT_IN_RETURN:
8250 if (arglist
8251 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8252 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8253 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8254 NULL_RTX, VOIDmode, 0));
8255 return const0_rtx;
8256
8257 case BUILT_IN_SAVEREGS:
8258 /* Don't do __builtin_saveregs more than once in a function.
8259 Save the result of the first call and reuse it. */
8260 if (saveregs_value != 0)
8261 return saveregs_value;
8262 {
8263 /* When this function is called, it means that registers must be
8264 saved on entry to this function. So we migrate the
8265 call to the first insn of this function. */
8266 rtx temp;
8267 rtx seq;
8268
8269 /* Now really call the function. `expand_call' does not call
8270 expand_builtin, so there is no danger of infinite recursion here. */
8271 start_sequence ();
8272
8273 #ifdef EXPAND_BUILTIN_SAVEREGS
8274 /* Do whatever the machine needs done in this case. */
8275 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8276 #else
8277 /* The register where the function returns its value
8278 is likely to have something else in it, such as an argument.
8279 So preserve that register around the call. */
8280
8281 if (value_mode != VOIDmode)
8282 {
8283 rtx valreg = hard_libcall_value (value_mode);
8284 rtx saved_valreg = gen_reg_rtx (value_mode);
8285
8286 emit_move_insn (saved_valreg, valreg);
8287 temp = expand_call (exp, target, ignore);
8288 emit_move_insn (valreg, saved_valreg);
8289 }
8290 else
8291 /* Generate the call, putting the value in a pseudo. */
8292 temp = expand_call (exp, target, ignore);
8293 #endif
8294
8295 seq = get_insns ();
8296 end_sequence ();
8297
8298 saveregs_value = temp;
8299
8300 /* Put the sequence after the NOTE that starts the function.
8301 If this is inside a SEQUENCE, make the outer-level insn
8302 chain current, so the code is placed at the start of the
8303 function. */
8304 push_topmost_sequence ();
8305 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8306 pop_topmost_sequence ();
8307 return temp;
8308 }
8309
8310 /* __builtin_args_info (N) returns word N of the arg space info
8311 for the current function. The number and meanings of words
8312 are controlled by the definition of CUMULATIVE_ARGS.
8313 case BUILT_IN_ARGS_INFO:
8314 {
8315 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8316 int i;
8317 int *word_ptr = (int *) &current_function_args_info;
8318 tree type, elts, result;
8319
8320 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8321 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8322 __FILE__, __LINE__);
8323
8324 if (arglist != 0)
8325 {
8326 tree arg = TREE_VALUE (arglist);
8327 if (TREE_CODE (arg) != INTEGER_CST)
8328 error ("argument of `__builtin_args_info' must be constant");
8329 else
8330 {
8331 int wordnum = TREE_INT_CST_LOW (arg);
8332
8333 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8334 error ("argument of `__builtin_args_info' out of range");
8335 else
8336 return GEN_INT (word_ptr[wordnum]);
8337 }
8338 }
8339 else
8340 error ("missing argument in `__builtin_args_info'");
8341
8342 return const0_rtx;
8343
8344 #if 0
8345 for (i = 0; i < nwords; i++)
8346 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8347
8348 type = build_array_type (integer_type_node,
8349 build_index_type (build_int_2 (nwords, 0)));
8350 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8351 TREE_CONSTANT (result) = 1;
8352 TREE_STATIC (result) = 1;
8353 result = build (INDIRECT_REF, build_pointer_type (type), result);
8354 TREE_CONSTANT (result) = 1;
8355 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8356 #endif
8357 }
8358
8359 /* Return the address of the first anonymous stack arg. */
8360 case BUILT_IN_NEXT_ARG:
8361 {
8362 tree fntype = TREE_TYPE (current_function_decl);
8363
8364 if ((TYPE_ARG_TYPES (fntype) == 0
8365 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8366 == void_type_node))
8367 && ! current_function_varargs)
8368 {
8369 error ("`va_start' used in function with fixed args");
8370 return const0_rtx;
8371 }
8372
8373 if (arglist)
8374 {
8375 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8376 tree arg = TREE_VALUE (arglist);
8377
8378 /* Strip off all nops for the sake of the comparison. This
8379 is not quite the same as STRIP_NOPS. It does more.
8380 We must also strip off INDIRECT_REF for C++ reference
8381 parameters. */
8382 while (TREE_CODE (arg) == NOP_EXPR
8383 || TREE_CODE (arg) == CONVERT_EXPR
8384 || TREE_CODE (arg) == NON_LVALUE_EXPR
8385 || TREE_CODE (arg) == INDIRECT_REF)
8386 arg = TREE_OPERAND (arg, 0);
8387 if (arg != last_parm)
8388 warning ("second parameter of `va_start' not last named argument");
8389 }
8390 else if (! current_function_varargs)
8391 /* Evidently an out of date version of <stdarg.h>; can't validate
8392 va_start's second argument, but can still work as intended. */
8393 warning ("`__builtin_next_arg' called without an argument");
8394 }
8395
8396 return expand_binop (Pmode, add_optab,
8397 current_function_internal_arg_pointer,
8398 current_function_arg_offset_rtx,
8399 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8400
8401 case BUILT_IN_CLASSIFY_TYPE:
8402 if (arglist != 0)
8403 {
8404 tree type = TREE_TYPE (TREE_VALUE (arglist));
8405 enum tree_code code = TREE_CODE (type);
8406 if (code == VOID_TYPE)
8407 return GEN_INT (void_type_class);
8408 if (code == INTEGER_TYPE)
8409 return GEN_INT (integer_type_class);
8410 if (code == CHAR_TYPE)
8411 return GEN_INT (char_type_class);
8412 if (code == ENUMERAL_TYPE)
8413 return GEN_INT (enumeral_type_class);
8414 if (code == BOOLEAN_TYPE)
8415 return GEN_INT (boolean_type_class);
8416 if (code == POINTER_TYPE)
8417 return GEN_INT (pointer_type_class);
8418 if (code == REFERENCE_TYPE)
8419 return GEN_INT (reference_type_class);
8420 if (code == OFFSET_TYPE)
8421 return GEN_INT (offset_type_class);
8422 if (code == REAL_TYPE)
8423 return GEN_INT (real_type_class);
8424 if (code == COMPLEX_TYPE)
8425 return GEN_INT (complex_type_class);
8426 if (code == FUNCTION_TYPE)
8427 return GEN_INT (function_type_class);
8428 if (code == METHOD_TYPE)
8429 return GEN_INT (method_type_class);
8430 if (code == RECORD_TYPE)
8431 return GEN_INT (record_type_class);
8432 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8433 return GEN_INT (union_type_class);
8434 if (code == ARRAY_TYPE)
8435 {
8436 if (TYPE_STRING_FLAG (type))
8437 return GEN_INT (string_type_class);
8438 else
8439 return GEN_INT (array_type_class);
8440 }
8441 if (code == SET_TYPE)
8442 return GEN_INT (set_type_class);
8443 if (code == FILE_TYPE)
8444 return GEN_INT (file_type_class);
8445 if (code == LANG_TYPE)
8446 return GEN_INT (lang_type_class);
8447 }
8448 return GEN_INT (no_type_class);
8449
8450 case BUILT_IN_CONSTANT_P:
8451 if (arglist == 0)
8452 return const0_rtx;
8453 else
8454 {
8455 tree arg = TREE_VALUE (arglist);
8456
8457 STRIP_NOPS (arg);
8458 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8459 || (TREE_CODE (arg) == ADDR_EXPR
8460 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8461 ? const1_rtx : const0_rtx);
8462 }
8463
8464 case BUILT_IN_FRAME_ADDRESS:
8465 /* The argument must be a nonnegative integer constant.
8466 It counts the number of frames to scan up the stack.
8467 The value is the address of that frame. */
8468 case BUILT_IN_RETURN_ADDRESS:
8469 /* The argument must be a nonnegative integer constant.
8470 It counts the number of frames to scan up the stack.
8471 The value is the return address saved in that frame. */
8472 if (arglist == 0)
8473 /* Warning about missing arg was already issued. */
8474 return const0_rtx;
8475 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8476 {
8477 error ("invalid arg to `__builtin_return_address'");
8478 return const0_rtx;
8479 }
8480 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8481 {
8482 error ("invalid arg to `__builtin_return_address'");
8483 return const0_rtx;
8484 }
8485 else
8486 {
8487 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8488 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8489 hard_frame_pointer_rtx);
8490
8491 /* For __builtin_frame_address, return what we've got. */
8492 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8493 return tem;
8494
8495 if (GET_CODE (tem) != REG)
8496 tem = copy_to_reg (tem);
8497 return tem;
8498 }
8499
8500 case BUILT_IN_ALLOCA:
8501 if (arglist == 0
8502 /* Arg could be non-integer if user redeclared this fcn wrong. */
8503 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8504 break;
8505
8506 /* Compute the argument. */
8507 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8508
8509 /* Allocate the desired space. */
8510 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8511
8512 case BUILT_IN_FFS:
8513 /* If not optimizing, call the library function. */
8514 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8515 break;
8516
8517 if (arglist == 0
8518 /* Arg could be non-integer if user redeclared this fcn wrong. */
8519 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8520 break;
8521
8522 /* Compute the argument. */
8523 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8524 /* Compute ffs, into TARGET if possible.
8525 Set TARGET to wherever the result comes back. */
8526 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8527 ffs_optab, op0, target, 1);
8528 if (target == 0)
8529 abort ();
8530 return target;
8531
8532 case BUILT_IN_STRLEN:
8533 /* If not optimizing, call the library function. */
8534 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8535 break;
8536
8537 if (arglist == 0
8538 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8539 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8540 break;
8541 else
8542 {
8543 tree src = TREE_VALUE (arglist);
8544 tree len = c_strlen (src);
8545
8546 int align
8547 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8548
8549 rtx result, src_rtx, char_rtx;
8550 enum machine_mode insn_mode = value_mode, char_mode;
8551 enum insn_code icode;
8552
8553 /* If the length is known, just return it. */
8554 if (len != 0)
8555 return expand_expr (len, target, mode, 0);
8556
8557 /* If SRC is not a pointer type, don't do this operation inline. */
8558 if (align == 0)
8559 break;
8560
8561 /* Call a function if we can't compute strlen in the right mode. */
8562
8563 while (insn_mode != VOIDmode)
8564 {
8565 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8566 if (icode != CODE_FOR_nothing)
8567 break;
8568
8569 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8570 }
8571 if (insn_mode == VOIDmode)
8572 break;
8573
8574 /* Make a place to write the result of the instruction. */
8575 result = target;
8576 if (! (result != 0
8577 && GET_CODE (result) == REG
8578 && GET_MODE (result) == insn_mode
8579 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8580 result = gen_reg_rtx (insn_mode);
8581
8582 /* Make sure the operands are acceptable to the predicates. */
8583
8584 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8585 result = gen_reg_rtx (insn_mode);
8586
8587 src_rtx = memory_address (BLKmode,
8588 expand_expr (src, NULL_RTX, ptr_mode,
8589 EXPAND_NORMAL));
8590 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8591 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8592
8593 char_rtx = const0_rtx;
8594 char_mode = insn_operand_mode[(int)icode][2];
8595 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8596 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8597
8598 emit_insn (GEN_FCN (icode) (result,
8599 gen_rtx (MEM, BLKmode, src_rtx),
8600 char_rtx, GEN_INT (align)));
8601
8602 /* Return the value in the proper mode for this function. */
8603 if (GET_MODE (result) == value_mode)
8604 return result;
8605 else if (target != 0)
8606 {
8607 convert_move (target, result, 0);
8608 return target;
8609 }
8610 else
8611 return convert_to_mode (value_mode, result, 0);
8612 }
8613
8614 case BUILT_IN_STRCPY:
8615 /* If not optimizing, call the library function. */
8616 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8617 break;
8618
8619 if (arglist == 0
8620 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8621 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8622 || TREE_CHAIN (arglist) == 0
8623 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8624 break;
8625 else
8626 {
8627 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8628
8629 if (len == 0)
8630 break;
8631
8632 len = size_binop (PLUS_EXPR, len, integer_one_node);
8633
8634 chainon (arglist, build_tree_list (NULL_TREE, len));
8635 }
8636
8637 /* Drops in. */
8638 case BUILT_IN_MEMCPY:
8639 /* If not optimizing, call the library function. */
8640 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8641 break;
8642
8643 if (arglist == 0
8644 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8645 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8646 || TREE_CHAIN (arglist) == 0
8647 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8648 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8649 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8650 break;
8651 else
8652 {
8653 tree dest = TREE_VALUE (arglist);
8654 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8655 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8656 tree type;
8657
8658 int src_align
8659 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8660 int dest_align
8661 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8662 rtx dest_rtx, dest_mem, src_mem;
8663
8664 /* If either SRC or DEST is not a pointer type, don't do
8665 this operation in-line. */
8666 if (src_align == 0 || dest_align == 0)
8667 {
8668 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8669 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8670 break;
8671 }
8672
8673 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8674 dest_mem = gen_rtx (MEM, BLKmode,
8675 memory_address (BLKmode, dest_rtx));
8676 /* There could be a void* cast on top of the object. */
8677 while (TREE_CODE (dest) == NOP_EXPR)
8678 dest = TREE_OPERAND (dest, 0);
8679 type = TREE_TYPE (TREE_TYPE (dest));
8680 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8681 src_mem = gen_rtx (MEM, BLKmode,
8682 memory_address (BLKmode,
8683 expand_expr (src, NULL_RTX,
8684 ptr_mode,
8685 EXPAND_SUM)));
8686 /* There could be a void* cast on top of the object. */
8687 while (TREE_CODE (src) == NOP_EXPR)
8688 src = TREE_OPERAND (src, 0);
8689 type = TREE_TYPE (TREE_TYPE (src));
8690 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8691
8692 /* Copy word part most expediently. */
8693 emit_block_move (dest_mem, src_mem,
8694 expand_expr (len, NULL_RTX, VOIDmode, 0),
8695 MIN (src_align, dest_align));
8696 return force_operand (dest_rtx, NULL_RTX);
8697 }
8698
8699 case BUILT_IN_MEMSET:
8700 /* If not optimizing, call the library function. */
8701 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8702 break;
8703
8704 if (arglist == 0
8705 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8706 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8707 || TREE_CHAIN (arglist) == 0
8708 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8709 != INTEGER_TYPE)
8710 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8711 || (INTEGER_TYPE
8712 != (TREE_CODE (TREE_TYPE
8713 (TREE_VALUE
8714 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8715 break;
8716 else
8717 {
8718 tree dest = TREE_VALUE (arglist);
8719 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8720 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8721 tree type;
8722
8723 int dest_align
8724 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8725 rtx dest_rtx, dest_mem;
8726
8727 /* If DEST is not a pointer type, don't do this
8728 operation in-line. */
8729 if (dest_align == 0)
8730 break;
8731
8732 /* If VAL is not 0, don't do this operation in-line. */
8733 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8734 break;
8735
8736 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8737 dest_mem = gen_rtx (MEM, BLKmode,
8738 memory_address (BLKmode, dest_rtx));
8739 /* There could be a void* cast on top of the object. */
8740 while (TREE_CODE (dest) == NOP_EXPR)
8741 dest = TREE_OPERAND (dest, 0);
8742 type = TREE_TYPE (TREE_TYPE (dest));
8743 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8744
8745 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8746 dest_align);
8747
8748 return force_operand (dest_rtx, NULL_RTX);
8749 }
8750
8751 /* These comparison functions need an instruction that returns an actual
8752 index. An ordinary compare that just sets the condition codes
8753 is not enough. */
8754 #ifdef HAVE_cmpstrsi
8755 case BUILT_IN_STRCMP:
8756 /* If not optimizing, call the library function. */
8757 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8758 break;
8759
8760 if (arglist == 0
8761 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8762 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8763 || TREE_CHAIN (arglist) == 0
8764 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8765 break;
8766 else if (!HAVE_cmpstrsi)
8767 break;
8768 {
8769 tree arg1 = TREE_VALUE (arglist);
8770 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8771 tree offset;
8772 tree len, len2;
8773
8774 len = c_strlen (arg1);
8775 if (len)
8776 len = size_binop (PLUS_EXPR, integer_one_node, len);
8777 len2 = c_strlen (arg2);
8778 if (len2)
8779 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8780
8781 /* If we don't have a constant length for the first, use the length
8782 of the second, if we know it. We don't require a constant for
8783 this case; some cost analysis could be done if both are available
8784 but neither is constant. For now, assume they're equally cheap.
8785
8786 If both strings have constant lengths, use the smaller. This
8787 	     could arise if optimization results in strcmp being called with
8788 two fixed strings, or if the code was machine-generated. We should
8789 add some code to the `memcmp' handler below to deal with such
8790 situations, someday. */
8791 if (!len || TREE_CODE (len) != INTEGER_CST)
8792 {
8793 if (len2)
8794 len = len2;
8795 else if (len == 0)
8796 break;
8797 }
8798 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8799 {
8800 if (tree_int_cst_lt (len2, len))
8801 len = len2;
8802 }
8803
8804 chainon (arglist, build_tree_list (NULL_TREE, len));
8805 }
8806
8807       /* Falls through.  */
8808 case BUILT_IN_MEMCMP:
8809 /* If not optimizing, call the library function. */
8810 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8811 break;
8812
8813 if (arglist == 0
8814 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8815 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8816 || TREE_CHAIN (arglist) == 0
8817 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8818 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8819 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8820 break;
8821 else if (!HAVE_cmpstrsi)
8822 break;
8823 {
8824 tree arg1 = TREE_VALUE (arglist);
8825 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8826 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8827 rtx result;
8828
8829 int arg1_align
8830 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8831 int arg2_align
8832 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8833 enum machine_mode insn_mode
8834 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8835
8836 /* If we don't have POINTER_TYPE, call the function. */
8837 if (arg1_align == 0 || arg2_align == 0)
8838 {
8839 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8840 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8841 break;
8842 }
8843
8844 /* Make a place to write the result of the instruction. */
8845 result = target;
8846 if (! (result != 0
8847 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8848 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8849 result = gen_reg_rtx (insn_mode);
8850
8851 emit_insn (gen_cmpstrsi (result,
8852 gen_rtx (MEM, BLKmode,
8853 expand_expr (arg1, NULL_RTX,
8854 ptr_mode,
8855 EXPAND_NORMAL)),
8856 gen_rtx (MEM, BLKmode,
8857 expand_expr (arg2, NULL_RTX,
8858 ptr_mode,
8859 EXPAND_NORMAL)),
8860 expand_expr (len, NULL_RTX, VOIDmode, 0),
8861 GEN_INT (MIN (arg1_align, arg2_align))));
8862
8863 /* Return the value in the proper mode for this function. */
8864 mode = TYPE_MODE (TREE_TYPE (exp));
8865 if (GET_MODE (result) == mode)
8866 return result;
8867 else if (target != 0)
8868 {
8869 convert_move (target, result, 0);
8870 return target;
8871 }
8872 else
8873 return convert_to_mode (mode, result, 0);
8874 }
8875 #else
8876 case BUILT_IN_STRCMP:
8877 case BUILT_IN_MEMCMP:
8878 break;
8879 #endif
8880
8881 /* __builtin_setjmp is passed a pointer to an array of five words
8882 (not all will be used on all machines). It operates similarly to
8883 the C library function of the same name, but is more efficient.
8884 Much of the code below (and for longjmp) is copied from the handling
8885 of non-local gotos.
8886
8887 	 NOTE: This is intended for use by GNAT and will only work when
8888 	 used the way GNAT uses it.  This code will likely NOT survive to
8889 the GCC 2.8.0 release. */
8890 case BUILT_IN_SETJMP:
8891 if (arglist == 0
8892 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8893 break;
8894
8895 {
8896 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8897 VOIDmode, 0);
8898 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8899 enum machine_mode sa_mode = Pmode;
8900 rtx stack_save;
8901 int old_inhibit_defer_pop = inhibit_defer_pop;
8902 int return_pops
8903 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8904 build_function_type (void_type_node, NULL_TREE),
8905 0);
8906 rtx next_arg_reg;
8907 CUMULATIVE_ARGS args_so_far;
8908 int i;
8909
8910 #ifdef POINTERS_EXTEND_UNSIGNED
8911 buf_addr = convert_memory_address (Pmode, buf_addr);
8912 #endif
8913
8914 buf_addr = force_reg (Pmode, buf_addr);
8915
8916 if (target == 0 || GET_CODE (target) != REG
8917 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8918 target = gen_reg_rtx (value_mode);
8919
8920 emit_queue ();
8921
8922 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8923 current_function_calls_setjmp = 1;
8924
8925 /* We store the frame pointer and the address of lab1 in the buffer
8926 and use the rest of it for the stack save area, which is
8927 machine-dependent. */
8928 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8929 virtual_stack_vars_rtx);
8930 emit_move_insn
8931 (validize_mem (gen_rtx (MEM, Pmode,
8932 plus_constant (buf_addr,
8933 GET_MODE_SIZE (Pmode)))),
8934 gen_rtx (LABEL_REF, Pmode, lab1));
8935
8936 #ifdef HAVE_save_stack_nonlocal
8937 if (HAVE_save_stack_nonlocal)
8938 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8939 #endif
8940
8941 stack_save = gen_rtx (MEM, sa_mode,
8942 plus_constant (buf_addr,
8943 2 * GET_MODE_SIZE (Pmode)));
8944 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8945
8946 #ifdef HAVE_setjmp
8947 if (HAVE_setjmp)
8948 emit_insn (gen_setjmp ());
8949 #endif
8950
8951 /* Set TARGET to zero and branch around the other case. */
8952 emit_move_insn (target, const0_rtx);
8953 emit_jump_insn (gen_jump (lab2));
8954 emit_barrier ();
8955 emit_label (lab1);
8956
8957 /* Note that setjmp clobbers FP when we get here, so we have to
8958 make sure it's marked as used by this function. */
8959 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8960
8961 /* Mark the static chain as clobbered here so life information
8962 doesn't get messed up for it. */
8963 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8964
8965 /* Now put in the code to restore the frame pointer, and argument
8966 pointer, if needed. The code below is from expand_end_bindings
8967 in stmt.c; see detailed documentation there. */
8968 #ifdef HAVE_nonlocal_goto
8969 if (! HAVE_nonlocal_goto)
8970 #endif
8971 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8972
8973 current_function_has_nonlocal_goto = 1;
8974
8975 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8976 if (fixed_regs[ARG_POINTER_REGNUM])
8977 {
8978 #ifdef ELIMINABLE_REGS
8979 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8980
8981 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8982 if (elim_regs[i].from == ARG_POINTER_REGNUM
8983 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8984 break;
8985
8986 if (i == sizeof elim_regs / sizeof elim_regs [0])
8987 #endif
8988 {
8989 /* Now restore our arg pointer from the address at which it
8990 was saved in our stack frame.
8991 		 If space hasn't been allocated for it yet, make
8992 some now. */
8993 if (arg_pointer_save_area == 0)
8994 arg_pointer_save_area
8995 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8996 emit_move_insn (virtual_incoming_args_rtx,
8997 copy_to_reg (arg_pointer_save_area));
8998 }
8999 }
9000 #endif
9001
9002 #ifdef HAVE_nonlocal_goto_receiver
9003 if (HAVE_nonlocal_goto_receiver)
9004 emit_insn (gen_nonlocal_goto_receiver ());
9005 #endif
9006 	/* The static chain pointer contains the address of the dummy function.
9007 We need to call it here to handle some PIC cases of restoring
9008 a global pointer. Then return 1. */
9009 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
9010
9011 /* We can't actually call emit_library_call here, so do everything
9012 it does, which isn't much for a libfunc with no args. */
9013 op0 = memory_address (FUNCTION_MODE, op0);
9014
9015 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
9016 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
9017 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
9018
9019 #ifndef ACCUMULATE_OUTGOING_ARGS
9020 #ifdef HAVE_call_pop
9021 if (HAVE_call_pop)
9022 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
9023 const0_rtx, next_arg_reg,
9024 GEN_INT (return_pops)));
9025 else
9026 #endif
9027 #endif
9028
9029 #ifdef HAVE_call
9030 if (HAVE_call)
9031 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
9032 const0_rtx, next_arg_reg, const0_rtx));
9033 else
9034 #endif
9035 abort ();
9036
9037 emit_move_insn (target, const1_rtx);
9038 emit_label (lab2);
9039 return target;
9040 }
9041
9042 /* __builtin_longjmp is passed a pointer to an array of five words
9043 and a value, which is a dummy. It's similar to the C library longjmp
9044 function but works with __builtin_setjmp above. */
9045 case BUILT_IN_LONGJMP:
9046 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9047 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9048 break;
9049
9050 {
9051 tree dummy_id = get_identifier ("__dummy");
9052 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9053 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9054 #ifdef POINTERS_EXTEND_UNSIGNED
9055 rtx buf_addr
9056 = force_reg (Pmode,
9057 convert_memory_address
9058 (Pmode,
9059 expand_expr (TREE_VALUE (arglist),
9060 NULL_RTX, VOIDmode, 0)));
9061 #else
9062 rtx buf_addr
9063 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9064 NULL_RTX,
9065 VOIDmode, 0));
9066 #endif
9067 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9068 rtx lab = gen_rtx (MEM, Pmode,
9069 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9070 enum machine_mode sa_mode
9071 #ifdef HAVE_save_stack_nonlocal
9072 = (HAVE_save_stack_nonlocal
9073 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9074 : Pmode);
9075 #else
9076 = Pmode;
9077 #endif
9078 rtx stack = gen_rtx (MEM, sa_mode,
9079 plus_constant (buf_addr,
9080 2 * GET_MODE_SIZE (Pmode)));
9081
9082 DECL_EXTERNAL (dummy_decl) = 1;
9083 TREE_PUBLIC (dummy_decl) = 1;
9084 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9085
9086 /* Expand the second expression just for side-effects. */
9087 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9088 const0_rtx, VOIDmode, 0);
9089
9090 assemble_external (dummy_decl);
9091
9092 /* Pick up FP, label, and SP from the block and jump. This code is
9093 from expand_goto in stmt.c; see there for detailed comments. */
9094 #if HAVE_nonlocal_goto
9095 if (HAVE_nonlocal_goto)
9096 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9097 XEXP (DECL_RTL (dummy_decl), 0)));
9098 else
9099 #endif
9100 {
9101 lab = copy_to_reg (lab);
9102 emit_move_insn (hard_frame_pointer_rtx, fp);
9103 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9104
9105 /* Put in the static chain register the address of the dummy
9106 function. */
9107 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9108 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9109 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9110 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9111 emit_indirect_jump (lab);
9112 }
9113
9114 return const0_rtx;
9115 }
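/* Illustrative sketch (not part of the compiler): the calling pattern the
   two cases above expect.  The buffer is an array of five words; the second
   argument of __builtin_longjmp is a dummy, expanded only for its side
   effects.  As noted above, this is really only intended to work the way
   GNAT uses it.  */
#if 0
static void *jmp_buffer[5];

static void
do_work ()
{
  /* Transfers control back to the __builtin_setjmp call below,
     which then yields 1.  */
  __builtin_longjmp (jmp_buffer, 1);
}

static int
example ()
{
  if (__builtin_setjmp (jmp_buffer) == 0)
    do_work ();		/* direct return: __builtin_setjmp gave 0 */
  return 0;		/* reached again after the longjmp */
}
#endif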
9116
9117 default: /* just do library call, if unknown builtin */
9118 error ("built-in function `%s' not currently supported",
9119 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9120 }
9121
9122 /* The switch statement above can drop through to cause the function
9123 to be called normally. */
9124
9125 return expand_call (exp, target, ignore);
9126 }
9127 \f
9128 /* Built-in functions to perform an untyped call and return. */
9129
9130 /* For each register that may be used for calling a function, this
9131 gives a mode used to copy the register's value. VOIDmode indicates
9132 the register is not used for calling a function. If the machine
9133 has register windows, this gives only the outbound registers.
9134 INCOMING_REGNO gives the corresponding inbound register. */
9135 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9136
9137 /* For each register that may be used for returning values, this gives
9138 a mode used to copy the register's value. VOIDmode indicates the
9139 register is not used for returning values. If the machine has
9140 register windows, this gives only the outbound registers.
9141 INCOMING_REGNO gives the corresponding inbound register. */
9142 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9143
9144 /* For each register that may be used for calling a function, this
9145 gives the offset of that register into the block returned by
9146 __builtin_apply_args. 0 indicates that the register is not
9147 used for calling a function. */
9148 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9149
9150 /* Return the offset of register REGNO into the block returned by
9151 __builtin_apply_args. This is not declared static, since it is
9152 needed in objc-act.c. */
9153
9154 int
9155 apply_args_register_offset (regno)
9156 int regno;
9157 {
9158 apply_args_size ();
9159
9160 /* Arguments are always put in outgoing registers (in the argument
9161      block) when that makes sense.  */
9162 #ifdef OUTGOING_REGNO
9163 regno = OUTGOING_REGNO(regno);
9164 #endif
9165 return apply_args_reg_offset[regno];
9166 }
9167
9168 /* Return the size required for the block returned by __builtin_apply_args,
9169 and initialize apply_args_mode. */
9170
9171 static int
9172 apply_args_size ()
9173 {
9174 static int size = -1;
9175 int align, regno;
9176 enum machine_mode mode;
9177
9178 /* The values computed by this function never change. */
9179 if (size < 0)
9180 {
9181 /* The first value is the incoming arg-pointer. */
9182 size = GET_MODE_SIZE (Pmode);
9183
9184 /* The second value is the structure value address unless this is
9185 passed as an "invisible" first argument. */
9186 if (struct_value_rtx)
9187 size += GET_MODE_SIZE (Pmode);
9188
9189 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9190 if (FUNCTION_ARG_REGNO_P (regno))
9191 {
9192 /* Search for the proper mode for copying this register's
9193 value. I'm not sure this is right, but it works so far. */
9194 enum machine_mode best_mode = VOIDmode;
9195
9196 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9197 mode != VOIDmode;
9198 mode = GET_MODE_WIDER_MODE (mode))
9199 if (HARD_REGNO_MODE_OK (regno, mode)
9200 && HARD_REGNO_NREGS (regno, mode) == 1)
9201 best_mode = mode;
9202
9203 if (best_mode == VOIDmode)
9204 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9205 mode != VOIDmode;
9206 mode = GET_MODE_WIDER_MODE (mode))
9207 if (HARD_REGNO_MODE_OK (regno, mode)
9208 && (mov_optab->handlers[(int) mode].insn_code
9209 != CODE_FOR_nothing))
9210 best_mode = mode;
9211
9212 mode = best_mode;
9213 if (mode == VOIDmode)
9214 abort ();
9215
9216 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9217 if (size % align != 0)
9218 size = CEIL (size, align) * align;
9219 apply_args_reg_offset[regno] = size;
9220 size += GET_MODE_SIZE (mode);
9221 apply_args_mode[regno] = mode;
9222 }
9223 else
9224 {
9225 apply_args_mode[regno] = VOIDmode;
9226 apply_args_reg_offset[regno] = 0;
9227 }
9228 }
9229 return size;
9230 }
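/* Worked example of the rounding idiom used above (and again in the
   block-layout loops below): before a register's slot is assigned, SIZE is
   bumped up to the next multiple of ALIGN.  With SIZE = 6 and ALIGN = 4:

	CEIL (6, 4) = (6 + 4 - 1) / 4 = 2,   so SIZE becomes 2 * 4 = 8,

   and the register's value is then stored at offset 8 of the block.  */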
9231
9232 /* Return the size required for the block returned by __builtin_apply,
9233 and initialize apply_result_mode. */
9234
9235 static int
9236 apply_result_size ()
9237 {
9238 static int size = -1;
9239 int align, regno;
9240 enum machine_mode mode;
9241
9242 /* The values computed by this function never change. */
9243 if (size < 0)
9244 {
9245 size = 0;
9246
9247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9248 if (FUNCTION_VALUE_REGNO_P (regno))
9249 {
9250 /* Search for the proper mode for copying this register's
9251 value. I'm not sure this is right, but it works so far. */
9252 enum machine_mode best_mode = VOIDmode;
9253
9254 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9255 mode != TImode;
9256 mode = GET_MODE_WIDER_MODE (mode))
9257 if (HARD_REGNO_MODE_OK (regno, mode))
9258 best_mode = mode;
9259
9260 if (best_mode == VOIDmode)
9261 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9262 mode != VOIDmode;
9263 mode = GET_MODE_WIDER_MODE (mode))
9264 if (HARD_REGNO_MODE_OK (regno, mode)
9265 && (mov_optab->handlers[(int) mode].insn_code
9266 != CODE_FOR_nothing))
9267 best_mode = mode;
9268
9269 mode = best_mode;
9270 if (mode == VOIDmode)
9271 abort ();
9272
9273 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9274 if (size % align != 0)
9275 size = CEIL (size, align) * align;
9276 size += GET_MODE_SIZE (mode);
9277 apply_result_mode[regno] = mode;
9278 }
9279 else
9280 apply_result_mode[regno] = VOIDmode;
9281
9282 /* Allow targets that use untyped_call and untyped_return to override
9283 the size so that machine-specific information can be stored here. */
9284 #ifdef APPLY_RESULT_SIZE
9285 size = APPLY_RESULT_SIZE;
9286 #endif
9287 }
9288 return size;
9289 }
9290
9291 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9292 /* Create a vector describing the result block RESULT. If SAVEP is true,
9293 the result block is used to save the values; otherwise it is used to
9294 restore the values. */
9295
9296 static rtx
9297 result_vector (savep, result)
9298 int savep;
9299 rtx result;
9300 {
9301 int regno, size, align, nelts;
9302 enum machine_mode mode;
9303 rtx reg, mem;
9304 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9305
9306 size = nelts = 0;
9307 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9308 if ((mode = apply_result_mode[regno]) != VOIDmode)
9309 {
9310 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9311 if (size % align != 0)
9312 size = CEIL (size, align) * align;
9313 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9314 mem = change_address (result, mode,
9315 plus_constant (XEXP (result, 0), size));
9316 savevec[nelts++] = (savep
9317 ? gen_rtx (SET, VOIDmode, mem, reg)
9318 : gen_rtx (SET, VOIDmode, reg, mem));
9319 size += GET_MODE_SIZE (mode);
9320 }
9321 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9322 }
9323 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9324
9325 /* Save the state required to perform an untyped call with the same
9326 arguments as were passed to the current function. */
9327
9328 static rtx
9329 expand_builtin_apply_args ()
9330 {
9331 rtx registers;
9332 int size, align, regno;
9333 enum machine_mode mode;
9334
9335 /* Create a block where the arg-pointer, structure value address,
9336 and argument registers can be saved. */
9337 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9338
9339 /* Walk past the arg-pointer and structure value address. */
9340 size = GET_MODE_SIZE (Pmode);
9341 if (struct_value_rtx)
9342 size += GET_MODE_SIZE (Pmode);
9343
9344 /* Save each register used in calling a function to the block. */
9345 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9346 if ((mode = apply_args_mode[regno]) != VOIDmode)
9347 {
9348 rtx tem;
9349
9350 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9351 if (size % align != 0)
9352 size = CEIL (size, align) * align;
9353
9354 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9355
9356 #ifdef STACK_REGS
9357 	/* For reg-stack.c's stack register housekeeping.
9358 Compare with a similar piece of code in function.c. */
9359
9360 emit_insn (gen_rtx (USE, mode, tem));
9361 #endif
9362
9363 emit_move_insn (change_address (registers, mode,
9364 plus_constant (XEXP (registers, 0),
9365 size)),
9366 tem);
9367 size += GET_MODE_SIZE (mode);
9368 }
9369
9370 /* Save the arg pointer to the block. */
9371 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9372 copy_to_reg (virtual_incoming_args_rtx));
9373 size = GET_MODE_SIZE (Pmode);
9374
9375 /* Save the structure value address unless this is passed as an
9376 "invisible" first argument. */
9377 if (struct_value_incoming_rtx)
9378 {
9379 emit_move_insn (change_address (registers, Pmode,
9380 plus_constant (XEXP (registers, 0),
9381 size)),
9382 copy_to_reg (struct_value_incoming_rtx));
9383 size += GET_MODE_SIZE (Pmode);
9384 }
9385
9386 /* Return the address of the block. */
9387 return copy_addr_to_reg (XEXP (registers, 0));
9388 }
9389
9390 /* Perform an untyped call and save the state required to perform an
9391 untyped return of whatever value was returned by the given function. */
9392
9393 static rtx
9394 expand_builtin_apply (function, arguments, argsize)
9395 rtx function, arguments, argsize;
9396 {
9397 int size, align, regno;
9398 enum machine_mode mode;
9399 rtx incoming_args, result, reg, dest, call_insn;
9400 rtx old_stack_level = 0;
9401 rtx call_fusage = 0;
9402
9403 /* Create a block where the return registers can be saved. */
9404 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9405
9406 /* ??? The argsize value should be adjusted here. */
9407
9408 /* Fetch the arg pointer from the ARGUMENTS block. */
9409 incoming_args = gen_reg_rtx (Pmode);
9410 emit_move_insn (incoming_args,
9411 gen_rtx (MEM, Pmode, arguments));
9412 #ifndef STACK_GROWS_DOWNWARD
9413 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9414 incoming_args, 0, OPTAB_LIB_WIDEN);
9415 #endif
9416
9417 /* Perform postincrements before actually calling the function. */
9418 emit_queue ();
9419
9420 /* Push a new argument block and copy the arguments. */
9421 do_pending_stack_adjust ();
9422 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9423
9424 /* Push a block of memory onto the stack to store the memory arguments.
9425 Save the address in a register, and copy the memory arguments. ??? I
9426      haven't figured out how the calling convention macros affect this,
9427 but it's likely that the source and/or destination addresses in
9428 the block copy will need updating in machine specific ways. */
9429 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9430 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9431 gen_rtx (MEM, BLKmode, incoming_args),
9432 argsize,
9433 PARM_BOUNDARY / BITS_PER_UNIT);
9434
9435 /* Refer to the argument block. */
9436 apply_args_size ();
9437 arguments = gen_rtx (MEM, BLKmode, arguments);
9438
9439 /* Walk past the arg-pointer and structure value address. */
9440 size = GET_MODE_SIZE (Pmode);
9441 if (struct_value_rtx)
9442 size += GET_MODE_SIZE (Pmode);
9443
9444 /* Restore each of the registers previously saved. Make USE insns
9445 for each of these registers for use in making the call. */
9446 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9447 if ((mode = apply_args_mode[regno]) != VOIDmode)
9448 {
9449 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9450 if (size % align != 0)
9451 size = CEIL (size, align) * align;
9452 reg = gen_rtx (REG, mode, regno);
9453 emit_move_insn (reg,
9454 change_address (arguments, mode,
9455 plus_constant (XEXP (arguments, 0),
9456 size)));
9457
9458 use_reg (&call_fusage, reg);
9459 size += GET_MODE_SIZE (mode);
9460 }
9461
9462 /* Restore the structure value address unless this is passed as an
9463 "invisible" first argument. */
9464 size = GET_MODE_SIZE (Pmode);
9465 if (struct_value_rtx)
9466 {
9467 rtx value = gen_reg_rtx (Pmode);
9468 emit_move_insn (value,
9469 change_address (arguments, Pmode,
9470 plus_constant (XEXP (arguments, 0),
9471 size)));
9472 emit_move_insn (struct_value_rtx, value);
9473 if (GET_CODE (struct_value_rtx) == REG)
9474 use_reg (&call_fusage, struct_value_rtx);
9475 size += GET_MODE_SIZE (Pmode);
9476 }
9477
9478 /* All arguments and registers used for the call are set up by now! */
9479 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9480
9481 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9482 and we don't want to load it into a register as an optimization,
9483 because prepare_call_address already did it if it should be done. */
9484 if (GET_CODE (function) != SYMBOL_REF)
9485 function = memory_address (FUNCTION_MODE, function);
9486
9487 /* Generate the actual call instruction and save the return value. */
9488 #ifdef HAVE_untyped_call
9489 if (HAVE_untyped_call)
9490 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9491 result, result_vector (1, result)));
9492 else
9493 #endif
9494 #ifdef HAVE_call_value
9495 if (HAVE_call_value)
9496 {
9497 rtx valreg = 0;
9498
9499 /* Locate the unique return register. It is not possible to
9500 express a call that sets more than one return register using
9501 call_value; use untyped_call for that. In fact, untyped_call
9502 only needs to save the return registers in the given block. */
9503 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9504 if ((mode = apply_result_mode[regno]) != VOIDmode)
9505 {
9506 if (valreg)
9507 abort (); /* HAVE_untyped_call required. */
9508 valreg = gen_rtx (REG, mode, regno);
9509 }
9510
9511 emit_call_insn (gen_call_value (valreg,
9512 gen_rtx (MEM, FUNCTION_MODE, function),
9513 const0_rtx, NULL_RTX, const0_rtx));
9514
9515 emit_move_insn (change_address (result, GET_MODE (valreg),
9516 XEXP (result, 0)),
9517 valreg);
9518 }
9519 else
9520 #endif
9521 abort ();
9522
9523 /* Find the CALL insn we just emitted. */
9524 for (call_insn = get_last_insn ();
9525 call_insn && GET_CODE (call_insn) != CALL_INSN;
9526 call_insn = PREV_INSN (call_insn))
9527 ;
9528
9529 if (! call_insn)
9530 abort ();
9531
9532 /* Put the register usage information on the CALL. If there is already
9533 some usage information, put ours at the end. */
9534 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9535 {
9536 rtx link;
9537
9538 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9539 link = XEXP (link, 1))
9540 ;
9541
9542 XEXP (link, 1) = call_fusage;
9543 }
9544 else
9545 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9546
9547 /* Restore the stack. */
9548 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9549
9550 /* Return the address of the result block. */
9551 return copy_addr_to_reg (XEXP (result, 0));
9552 }
9553
9554 /* Perform an untyped return. */
9555
9556 static void
9557 expand_builtin_return (result)
9558 rtx result;
9559 {
9560 int size, align, regno;
9561 enum machine_mode mode;
9562 rtx reg;
9563 rtx call_fusage = 0;
9564
9565 apply_result_size ();
9566 result = gen_rtx (MEM, BLKmode, result);
9567
9568 #ifdef HAVE_untyped_return
9569 if (HAVE_untyped_return)
9570 {
9571 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9572 emit_barrier ();
9573 return;
9574 }
9575 #endif
9576
9577 /* Restore the return value and note that each value is used. */
9578 size = 0;
9579 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9580 if ((mode = apply_result_mode[regno]) != VOIDmode)
9581 {
9582 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9583 if (size % align != 0)
9584 size = CEIL (size, align) * align;
9585 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9586 emit_move_insn (reg,
9587 change_address (result, mode,
9588 plus_constant (XEXP (result, 0),
9589 size)));
9590
9591 push_to_sequence (call_fusage);
9592 emit_insn (gen_rtx (USE, VOIDmode, reg));
9593 call_fusage = get_insns ();
9594 end_sequence ();
9595 size += GET_MODE_SIZE (mode);
9596 }
9597
9598 /* Put the USE insns before the return. */
9599 emit_insns (call_fusage);
9600
9601   /* Return whatever values were restored by jumping directly to the end
9602 of the function. */
9603 expand_null_return ();
9604 }
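/* Illustrative sketch (not part of the compiler): how the three untyped-call
   builtins expanded above fit together.  A wrapper can forward whatever
   arguments it received to another function and return whatever that
   function returned, without knowing either signature.  The argument-block
   size passed to __builtin_apply (64 here) is a caller-supplied guess, not
   something computed by this file.  */
#if 0
extern double target_function ();

double
forwarding_wrapper ()
{
  void *args = __builtin_apply_args ();	/* save incoming registers */
  void *result = __builtin_apply ((void (*) ()) target_function,
				  args, 64);	/* reissue the call */
  __builtin_return (result);		/* return its return value */
}
#endif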
9605 \f
9606 /* Expand code for a post- or pre- increment or decrement
9607 and return the RTX for the result.
9608 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9609
9610 static rtx
9611 expand_increment (exp, post, ignore)
9612 register tree exp;
9613 int post, ignore;
9614 {
9615 register rtx op0, op1;
9616 register rtx temp, value;
9617 register tree incremented = TREE_OPERAND (exp, 0);
9618 optab this_optab = add_optab;
9619 int icode;
9620 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9621 int op0_is_copy = 0;
9622 int single_insn = 0;
9623 /* 1 means we can't store into OP0 directly,
9624 because it is a subreg narrower than a word,
9625 and we don't dare clobber the rest of the word. */
9626 int bad_subreg = 0;
9627
9628 if (output_bytecode)
9629 {
9630 bc_expand_expr (exp);
9631 return NULL_RTX;
9632 }
9633
9634 /* Stabilize any component ref that might need to be
9635 evaluated more than once below. */
9636 if (!post
9637 || TREE_CODE (incremented) == BIT_FIELD_REF
9638 || (TREE_CODE (incremented) == COMPONENT_REF
9639 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9640 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9641 incremented = stabilize_reference (incremented);
9642 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9643 ones into save exprs so that they don't accidentally get evaluated
9644 more than once by the code below. */
9645 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9646 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9647 incremented = save_expr (incremented);
9648
9649 /* Compute the operands as RTX.
9650 Note whether OP0 is the actual lvalue or a copy of it:
9651 I believe it is a copy iff it is a register or subreg
9652 and insns were generated in computing it. */
9653
9654 temp = get_last_insn ();
9655 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9656
9657 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9658 in place but instead must do sign- or zero-extension during assignment,
9659 so we copy it into a new register and let the code below use it as
9660 a copy.
9661
9662      Note that we can safely modify this SUBREG since it is known not to be
9663 shared (it was made by the expand_expr call above). */
9664
9665 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9666 {
9667 if (post)
9668 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9669 else
9670 bad_subreg = 1;
9671 }
9672 else if (GET_CODE (op0) == SUBREG
9673 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9674 {
9675 /* We cannot increment this SUBREG in place. If we are
9676 post-incrementing, get a copy of the old value. Otherwise,
9677 just mark that we cannot increment in place. */
9678 if (post)
9679 op0 = copy_to_reg (op0);
9680 else
9681 bad_subreg = 1;
9682 }
9683
9684 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9685 && temp != get_last_insn ());
9686 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9687
9688 /* Decide whether incrementing or decrementing. */
9689 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9690 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9691 this_optab = sub_optab;
9692
9693 /* Convert decrement by a constant into a negative increment. */
9694 if (this_optab == sub_optab
9695 && GET_CODE (op1) == CONST_INT)
9696 {
9697 op1 = GEN_INT (- INTVAL (op1));
9698 this_optab = add_optab;
9699 }
9700
9701 /* For a preincrement, see if we can do this with a single instruction. */
9702 if (!post)
9703 {
9704 icode = (int) this_optab->handlers[(int) mode].insn_code;
9705 if (icode != (int) CODE_FOR_nothing
9706 /* Make sure that OP0 is valid for operands 0 and 1
9707 of the insn we want to queue. */
9708 && (*insn_operand_predicate[icode][0]) (op0, mode)
9709 && (*insn_operand_predicate[icode][1]) (op0, mode)
9710 && (*insn_operand_predicate[icode][2]) (op1, mode))
9711 single_insn = 1;
9712 }
9713
9714 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9715 then we cannot just increment OP0. We must therefore contrive to
9716 increment the original value. Then, for postincrement, we can return
9717 OP0 since it is a copy of the old value. For preincrement, expand here
9718 unless we can do it with a single insn.
9719
9720 Likewise if storing directly into OP0 would clobber high bits
9721 we need to preserve (bad_subreg). */
9722 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9723 {
9724 /* This is the easiest way to increment the value wherever it is.
9725 Problems with multiple evaluation of INCREMENTED are prevented
9726 because either (1) it is a component_ref or preincrement,
9727 in which case it was stabilized above, or (2) it is an array_ref
9728 with constant index in an array in a register, which is
9729 safe to reevaluate. */
9730 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9731 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9732 ? MINUS_EXPR : PLUS_EXPR),
9733 TREE_TYPE (exp),
9734 incremented,
9735 TREE_OPERAND (exp, 1));
9736
9737 while (TREE_CODE (incremented) == NOP_EXPR
9738 || TREE_CODE (incremented) == CONVERT_EXPR)
9739 {
9740 newexp = convert (TREE_TYPE (incremented), newexp);
9741 incremented = TREE_OPERAND (incremented, 0);
9742 }
9743
9744       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9745 return post ? op0 : temp;
9746 }
9747
9748 if (post)
9749 {
9750 /* We have a true reference to the value in OP0.
9751 If there is an insn to add or subtract in this mode, queue it.
9752 Queueing the increment insn avoids the register shuffling
9753 that often results if we must increment now and first save
9754 the old value for subsequent use. */
9755
9756 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9757 op0 = stabilize (op0);
9758 #endif
9759
9760 icode = (int) this_optab->handlers[(int) mode].insn_code;
9761 if (icode != (int) CODE_FOR_nothing
9762 /* Make sure that OP0 is valid for operands 0 and 1
9763 of the insn we want to queue. */
9764 && (*insn_operand_predicate[icode][0]) (op0, mode)
9765 && (*insn_operand_predicate[icode][1]) (op0, mode))
9766 {
9767 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9768 op1 = force_reg (mode, op1);
9769
9770 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9771 }
9772 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9773 {
9774 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9775 rtx temp, result;
9776
9777 op0 = change_address (op0, VOIDmode, addr);
9778 temp = force_reg (GET_MODE (op0), op0);
9779 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9780 op1 = force_reg (mode, op1);
9781
9782 /* The increment queue is LIFO, thus we have to `queue'
9783 the instructions in reverse order. */
9784 enqueue_insn (op0, gen_move_insn (op0, temp));
9785 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9786 return result;
9787 }
9788 }
9789
9790 /* Preincrement, or we can't increment with one simple insn. */
9791 if (post)
9792 /* Save a copy of the value before inc or dec, to return it later. */
9793 temp = value = copy_to_reg (op0);
9794 else
9795 /* Arrange to return the incremented value. */
9796 /* Copy the rtx because expand_binop will protect from the queue,
9797 and the results of that would be invalid for us to return
9798 if our caller does emit_queue before using our result. */
9799 temp = copy_rtx (value = op0);
9800
9801 /* Increment however we can. */
9802 op1 = expand_binop (mode, this_optab, value, op1, op0,
9803 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9804 /* Make sure the value is stored into OP0. */
9805 if (op1 != op0)
9806 emit_move_insn (op0, op1);
9807
9808 return temp;
9809 }
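/* Illustrative sketch (not part of the compiler): the source-level semantics
   expand_increment must preserve.  For a postincrement the value of the
   expression is the old value of the operand (POST is 1 and a copy is
   returned); for a preincrement it is the new, incremented value.  */
#if 0
static void
increment_example ()
{
  int i = 5, pre, post;

  post = i++;		/* post == 5, i == 6 */
  pre = ++i;		/* pre == 7, i == 7 */
}
#endif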
9810 \f
9811 /* Expand all function calls contained within EXP, innermost ones first.
9812 But don't look within expressions that have sequence points.
9813 For each CALL_EXPR, record the rtx for its value
9814 in the CALL_EXPR_RTL field. */
9815
9816 static void
9817 preexpand_calls (exp)
9818 tree exp;
9819 {
9820 register int nops, i;
9821 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9822
9823 if (! do_preexpand_calls)
9824 return;
9825
9826 /* Only expressions and references can contain calls. */
9827
9828 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9829 return;
9830
9831 switch (TREE_CODE (exp))
9832 {
9833 case CALL_EXPR:
9834 /* Do nothing if already expanded. */
9835 if (CALL_EXPR_RTL (exp) != 0
9836 /* Do nothing if the call returns a variable-sized object. */
9837 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9838 /* Do nothing to built-in functions. */
9839 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9840 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9841 == FUNCTION_DECL)
9842 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9843 return;
9844
9845 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9846 return;
9847
9848 case COMPOUND_EXPR:
9849 case COND_EXPR:
9850 case TRUTH_ANDIF_EXPR:
9851 case TRUTH_ORIF_EXPR:
9852 /* If we find one of these, then we can be sure
9853 the adjust will be done for it (since it makes jumps).
9854 Do it now, so that if this is inside an argument
9855 of a function, we don't get the stack adjustment
9856 after some other args have already been pushed. */
9857 do_pending_stack_adjust ();
9858 return;
9859
9860 case BLOCK:
9861 case RTL_EXPR:
9862 case WITH_CLEANUP_EXPR:
9863 case CLEANUP_POINT_EXPR:
9864 return;
9865
9866 case SAVE_EXPR:
9867 if (SAVE_EXPR_RTL (exp) != 0)
9868 return;
9869 }
9870
9871 nops = tree_code_length[(int) TREE_CODE (exp)];
9872 for (i = 0; i < nops; i++)
9873 if (TREE_OPERAND (exp, i) != 0)
9874 {
9875 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9876 if (type == 'e' || type == '<' || type == '1' || type == '2'
9877 || type == 'r')
9878 preexpand_calls (TREE_OPERAND (exp, i));
9879 }
9880 }
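/* Illustrative sketch (not part of the compiler): in a call such as

	f (g (x), a && h (y));

   the call to g is expanded ahead of time by preexpand_calls, but the call
   to h is not, because it sits behind the sequence point of the
   TRUTH_ANDIF_EXPR and may not be evaluated at all.  */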
9881 \f
9882 /* At the start of a function, record that we have no previously-pushed
9883 arguments waiting to be popped. */
9884
9885 void
9886 init_pending_stack_adjust ()
9887 {
9888 pending_stack_adjust = 0;
9889 }
9890
9891 /* When exiting from a function, if safe, clear out any pending stack adjust
9892 so the adjustment won't get done. */
9893
9894 void
9895 clear_pending_stack_adjust ()
9896 {
9897 #ifdef EXIT_IGNORE_STACK
9898 if (optimize > 0
9899 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9900 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9901 && ! flag_inline_functions)
9902 pending_stack_adjust = 0;
9903 #endif
9904 }
9905
9906 /* Pop any previously-pushed arguments that have not been popped yet. */
9907
9908 void
9909 do_pending_stack_adjust ()
9910 {
9911 if (inhibit_defer_pop == 0)
9912 {
9913 if (pending_stack_adjust != 0)
9914 adjust_stack (GEN_INT (pending_stack_adjust));
9915 pending_stack_adjust = 0;
9916 }
9917 }
9918
9919 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9920 Returns the cleanups to be performed. */
9921
9922 static tree
9923 defer_cleanups_to (old_cleanups)
9924 tree old_cleanups;
9925 {
9926 tree new_cleanups = NULL_TREE;
9927 tree cleanups = cleanups_this_call;
9928 tree last = NULL_TREE;
9929
9930 while (cleanups_this_call != old_cleanups)
9931 {
9932 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9933 last = cleanups_this_call;
9934 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9935 }
9936
9937 if (last)
9938 {
9939 /* Remove the list from the chain of cleanups. */
9940 TREE_CHAIN (last) = NULL_TREE;
9941
9942 /* reverse them so that we can build them in the right order. */
9943 cleanups = nreverse (cleanups);
9944
9945 /* All cleanups must be on the function_obstack. */
9946 push_obstacks_nochange ();
9947 resume_temporary_allocation ();
9948
9949 while (cleanups)
9950 {
9951 if (new_cleanups)
9952 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9953 TREE_VALUE (cleanups), new_cleanups);
9954 else
9955 new_cleanups = TREE_VALUE (cleanups);
9956
9957 cleanups = TREE_CHAIN (cleanups);
9958 }
9959
9960 pop_obstacks ();
9961 }
9962
9963 return new_cleanups;
9964 }
9965
9966 /* Expand all cleanups up to OLD_CLEANUPS.
9967 Needed here, and also for language-dependent calls. */
9968
9969 void
9970 expand_cleanups_to (old_cleanups)
9971 tree old_cleanups;
9972 {
9973 while (cleanups_this_call != old_cleanups)
9974 {
9975 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9976 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9977 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9978 }
9979 }
9980 \f
9981 /* Expand conditional expressions. */
9982
9983 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9984 LABEL is an rtx of code CODE_LABEL, in this function and all the
9985 functions here. */
9986
9987 void
9988 jumpifnot (exp, label)
9989 tree exp;
9990 rtx label;
9991 {
9992 do_jump (exp, label, NULL_RTX);
9993 }
9994
9995 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9996
9997 void
9998 jumpif (exp, label)
9999 tree exp;
10000 rtx label;
10001 {
10002 do_jump (exp, NULL_RTX, label);
10003 }
10004
10005 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10006 the result is zero, or IF_TRUE_LABEL if the result is one.
10007 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10008 meaning fall through in that case.
10009
10010 do_jump always does any pending stack adjust except when it does not
10011 actually perform a jump. An example where there is no jump
10012 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10013
10014 This function is responsible for optimizing cases such as
10015 &&, || and comparison operators in EXP. */
10016
10017 void
10018 do_jump (exp, if_false_label, if_true_label)
10019 tree exp;
10020 rtx if_false_label, if_true_label;
10021 {
10022 register enum tree_code code = TREE_CODE (exp);
10023 /* Some cases need to create a label to jump to
10024 in order to properly fall through.
10025 These cases set DROP_THROUGH_LABEL nonzero. */
10026 rtx drop_through_label = 0;
10027 rtx temp;
10028 rtx comparison = 0;
10029 int i;
10030 tree type;
10031 enum machine_mode mode;
10032
10033 emit_queue ();
10034
10035 switch (code)
10036 {
10037 case ERROR_MARK:
10038 break;
10039
10040 case INTEGER_CST:
10041 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10042 if (temp)
10043 emit_jump (temp);
10044 break;
10045
10046 #if 0
10047 /* This is not true with #pragma weak */
10048 case ADDR_EXPR:
10049 /* The address of something can never be zero. */
10050 if (if_true_label)
10051 emit_jump (if_true_label);
10052 break;
10053 #endif
10054
10055 case NOP_EXPR:
10056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10057 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10058 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10059 goto normal;
10060 case CONVERT_EXPR:
10061 /* If we are narrowing the operand, we have to do the compare in the
10062 narrower mode. */
10063 if ((TYPE_PRECISION (TREE_TYPE (exp))
10064 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10065 goto normal;
10066 case NON_LVALUE_EXPR:
10067 case REFERENCE_EXPR:
10068 case ABS_EXPR:
10069 case NEGATE_EXPR:
10070 case LROTATE_EXPR:
10071 case RROTATE_EXPR:
10072 /* These cannot change zero->non-zero or vice versa. */
10073 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10074 break;
10075
10076 #if 0
10077   /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10078 a test and can be longer if the test is eliminated. */
10079 case PLUS_EXPR:
10080 /* Reduce to minus. */
10081 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10082 TREE_OPERAND (exp, 0),
10083 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10084 TREE_OPERAND (exp, 1))));
10085 /* Process as MINUS. */
10086 #endif
10087
10088 case MINUS_EXPR:
10089 /* Non-zero iff operands of minus differ. */
10090 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10091 TREE_OPERAND (exp, 0),
10092 TREE_OPERAND (exp, 1)),
10093 NE, NE);
10094 break;
10095
10096 case BIT_AND_EXPR:
10097 /* If we are AND'ing with a small constant, do this comparison in the
10098 smallest type that fits. If the machine doesn't have comparisons
10099 that small, it will be converted back to the wider comparison.
10100 This helps if we are testing the sign bit of a narrower object.
10101 combine can't do this for us because it can't know whether a
10102 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10103
10104 if (! SLOW_BYTE_ACCESS
10105 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10106 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10107 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10108 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10109 && (type = type_for_mode (mode, 1)) != 0
10110 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10111 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10112 != CODE_FOR_nothing))
10113 {
10114 do_jump (convert (type, exp), if_false_label, if_true_label);
10115 break;
10116 }
10117 goto normal;
10118
10119 case TRUTH_NOT_EXPR:
10120 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10121 break;
10122
10123 case TRUTH_ANDIF_EXPR:
10124 {
10125 rtx seq1, seq2;
10126 tree cleanups, old_cleanups;
10127
10128 if (if_false_label == 0)
10129 if_false_label = drop_through_label = gen_label_rtx ();
10130 start_sequence ();
10131 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10132 seq1 = get_insns ();
10133 end_sequence ();
10134
10135 old_cleanups = cleanups_this_call;
10136 start_sequence ();
10137 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10138 seq2 = get_insns ();
10139 cleanups = defer_cleanups_to (old_cleanups);
10140 end_sequence ();
10141
10142 if (cleanups)
10143 {
10144 rtx flag = gen_reg_rtx (word_mode);
10145 tree new_cleanups;
10146 tree cond;
10147
10148 /* Flag cleanups as not needed. */
10149 emit_move_insn (flag, const0_rtx);
10150 emit_insns (seq1);
10151
10152 /* Flag cleanups as needed. */
10153 emit_move_insn (flag, const1_rtx);
10154 emit_insns (seq2);
10155
10156 /* All cleanups must be on the function_obstack. */
10157 push_obstacks_nochange ();
10158 resume_temporary_allocation ();
10159
10160 /* convert flag, which is an rtx, into a tree. */
10161 cond = make_node (RTL_EXPR);
10162 TREE_TYPE (cond) = integer_type_node;
10163 RTL_EXPR_RTL (cond) = flag;
10164 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10165 cond = save_expr (cond);
10166
10167 new_cleanups = build (COND_EXPR, void_type_node,
10168 truthvalue_conversion (cond),
10169 cleanups, integer_zero_node);
10170 new_cleanups = fold (new_cleanups);
10171
10172 pop_obstacks ();
10173
10174 /* Now add in the conditionalized cleanups. */
10175 cleanups_this_call
10176 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10177 expand_eh_region_start ();
10178 }
10179 else
10180 {
10181 emit_insns (seq1);
10182 emit_insns (seq2);
10183 }
10184 }
10185 break;
10186
10187 case TRUTH_ORIF_EXPR:
10188 {
10189 rtx seq1, seq2;
10190 tree cleanups, old_cleanups;
10191
10192 if (if_true_label == 0)
10193 if_true_label = drop_through_label = gen_label_rtx ();
10194 start_sequence ();
10195 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10196 seq1 = get_insns ();
10197 end_sequence ();
10198
10199 old_cleanups = cleanups_this_call;
10200 start_sequence ();
10201 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10202 seq2 = get_insns ();
10203 cleanups = defer_cleanups_to (old_cleanups);
10204 end_sequence ();
10205
10206 if (cleanups)
10207 {
10208 rtx flag = gen_reg_rtx (word_mode);
10209 tree new_cleanups;
10210 tree cond;
10211
10212 /* Flag cleanups as not needed. */
10213 emit_move_insn (flag, const0_rtx);
10214 emit_insns (seq1);
10215
10216 /* Flag cleanups as needed. */
10217 emit_move_insn (flag, const1_rtx);
10218 emit_insns (seq2);
10219
10220 /* All cleanups must be on the function_obstack. */
10221 push_obstacks_nochange ();
10222 resume_temporary_allocation ();
10223
10224 /* convert flag, which is an rtx, into a tree. */
10225 cond = make_node (RTL_EXPR);
10226 TREE_TYPE (cond) = integer_type_node;
10227 RTL_EXPR_RTL (cond) = flag;
10228 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10229 cond = save_expr (cond);
10230
10231 new_cleanups = build (COND_EXPR, void_type_node,
10232 truthvalue_conversion (cond),
10233 cleanups, integer_zero_node);
10234 new_cleanups = fold (new_cleanups);
10235
10236 pop_obstacks ();
10237
10238 /* Now add in the conditionalized cleanups. */
10239 cleanups_this_call
10240 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10241 expand_eh_region_start ();
10242 }
10243 else
10244 {
10245 emit_insns (seq1);
10246 emit_insns (seq2);
10247 }
10248 }
10249 break;
10250
10251 case COMPOUND_EXPR:
10252 push_temp_slots ();
10253 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10254 preserve_temp_slots (NULL_RTX);
10255 free_temp_slots ();
10256 pop_temp_slots ();
10257 emit_queue ();
10258 do_pending_stack_adjust ();
10259 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10260 break;
10261
10262 case COMPONENT_REF:
10263 case BIT_FIELD_REF:
10264 case ARRAY_REF:
10265 {
10266 int bitsize, bitpos, unsignedp;
10267 enum machine_mode mode;
10268 tree type;
10269 tree offset;
10270 int volatilep = 0;
10271 int alignment;
10272
10273 /* Get description of this reference. We don't actually care
10274 about the underlying object here. */
10275 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10276 &mode, &unsignedp, &volatilep,
10277 &alignment);
10278
10279 type = type_for_size (bitsize, unsignedp);
10280 if (! SLOW_BYTE_ACCESS
10281 && type != 0 && bitsize >= 0
10282 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10283 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10284 != CODE_FOR_nothing))
10285 {
10286 do_jump (convert (type, exp), if_false_label, if_true_label);
10287 break;
10288 }
10289 goto normal;
10290 }
10291
10292 case COND_EXPR:
10293 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10294 if (integer_onep (TREE_OPERAND (exp, 1))
10295 && integer_zerop (TREE_OPERAND (exp, 2)))
10296 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10297
10298 else if (integer_zerop (TREE_OPERAND (exp, 1))
10299 && integer_onep (TREE_OPERAND (exp, 2)))
10300 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10301
10302 else
10303 {
10304 rtx seq1, seq2;
10305 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10306
10307 register rtx label1 = gen_label_rtx ();
10308 drop_through_label = gen_label_rtx ();
10309
10310 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10311
10312 /* We need to save the cleanups for the lhs and rhs separately.
10313 Keep track of the cleanups seen before the lhs. */
10314 old_cleanups = cleanups_this_call;
10315 start_sequence ();
10316 /* Now the THEN-expression. */
10317 do_jump (TREE_OPERAND (exp, 1),
10318 if_false_label ? if_false_label : drop_through_label,
10319 if_true_label ? if_true_label : drop_through_label);
10320 /* In case the do_jump just above never jumps. */
10321 do_pending_stack_adjust ();
10322 emit_label (label1);
10323 seq1 = get_insns ();
10324 /* Now grab the cleanups for the lhs. */
10325 cleanups_left_side = defer_cleanups_to (old_cleanups);
10326 end_sequence ();
10327
10328 /* And keep track of where we start before the rhs. */
10329 old_cleanups = cleanups_this_call;
10330 start_sequence ();
10331 /* Now the ELSE-expression. */
10332 do_jump (TREE_OPERAND (exp, 2),
10333 if_false_label ? if_false_label : drop_through_label,
10334 if_true_label ? if_true_label : drop_through_label);
10335 seq2 = get_insns ();
10336 /* Grab the cleanups for the rhs. */
10337 cleanups_right_side = defer_cleanups_to (old_cleanups);
10338 end_sequence ();
10339
10340 if (cleanups_left_side || cleanups_right_side)
10341 {
10342 /* Make the cleanups for the THEN and ELSE clauses
10343 conditional based on which half is executed. */
10344 rtx flag = gen_reg_rtx (word_mode);
10345 tree new_cleanups;
10346 tree cond;
10347
10348 /* Set the flag to 0 so that we know we executed the lhs. */
10349 emit_move_insn (flag, const0_rtx);
10350 emit_insns (seq1);
10351
10352 /* Set the flag to 1 so that we know we executed the rhs. */
10353 emit_move_insn (flag, const1_rtx);
10354 emit_insns (seq2);
10355
10356 /* Make sure the cleanup lives on the function_obstack. */
10357 push_obstacks_nochange ();
10358 resume_temporary_allocation ();
10359
10360 /* Now, build up a COND_EXPR that tests the value of the
10361 flag, and then either do the cleanups for the lhs or the
10362 rhs. */
10363 cond = make_node (RTL_EXPR);
10364 TREE_TYPE (cond) = integer_type_node;
10365 RTL_EXPR_RTL (cond) = flag;
10366 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10367 cond = save_expr (cond);
10368
10369 new_cleanups = build (COND_EXPR, void_type_node,
10370 truthvalue_conversion (cond),
10371 cleanups_right_side, cleanups_left_side);
10372 new_cleanups = fold (new_cleanups);
10373
10374 pop_obstacks ();
10375
10376 /* Now add in the conditionalized cleanups. */
10377 cleanups_this_call
10378 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10379 expand_eh_region_start ();
10380 }
10381 else
10382 {
10383 /* No cleanups were needed, so emit the two sequences
10384 directly. */
10385 emit_insns (seq1);
10386 emit_insns (seq2);
10387 }
10388 }
10389 break;
10390
10391 case EQ_EXPR:
10392 {
10393 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10394
10395 if (integer_zerop (TREE_OPERAND (exp, 1)))
10396 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10397 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10398 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10399 do_jump
10400 (fold
10401 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10402 fold (build (EQ_EXPR, TREE_TYPE (exp),
10403 fold (build1 (REALPART_EXPR,
10404 TREE_TYPE (inner_type),
10405 TREE_OPERAND (exp, 0))),
10406 fold (build1 (REALPART_EXPR,
10407 TREE_TYPE (inner_type),
10408 TREE_OPERAND (exp, 1))))),
10409 fold (build (EQ_EXPR, TREE_TYPE (exp),
10410 fold (build1 (IMAGPART_EXPR,
10411 TREE_TYPE (inner_type),
10412 TREE_OPERAND (exp, 0))),
10413 fold (build1 (IMAGPART_EXPR,
10414 TREE_TYPE (inner_type),
10415 TREE_OPERAND (exp, 1))))))),
10416 if_false_label, if_true_label);
10417 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10418 && !can_compare_p (TYPE_MODE (inner_type)))
10419 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10420 else
10421 comparison = compare (exp, EQ, EQ);
10422 break;
10423 }
10424
10425 case NE_EXPR:
10426 {
10427 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10428
10429 if (integer_zerop (TREE_OPERAND (exp, 1)))
10430 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10431 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10432 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10433 do_jump
10434 (fold
10435 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10436 fold (build (NE_EXPR, TREE_TYPE (exp),
10437 fold (build1 (REALPART_EXPR,
10438 TREE_TYPE (inner_type),
10439 TREE_OPERAND (exp, 0))),
10440 fold (build1 (REALPART_EXPR,
10441 TREE_TYPE (inner_type),
10442 TREE_OPERAND (exp, 1))))),
10443 fold (build (NE_EXPR, TREE_TYPE (exp),
10444 fold (build1 (IMAGPART_EXPR,
10445 TREE_TYPE (inner_type),
10446 TREE_OPERAND (exp, 0))),
10447 fold (build1 (IMAGPART_EXPR,
10448 TREE_TYPE (inner_type),
10449 TREE_OPERAND (exp, 1))))))),
10450 if_false_label, if_true_label);
10451 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10452 && !can_compare_p (TYPE_MODE (inner_type)))
10453 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10454 else
10455 comparison = compare (exp, NE, NE);
10456 break;
10457 }
10458
10459 case LT_EXPR:
10460 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10461 == MODE_INT)
10462 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10463 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10464 else
10465 comparison = compare (exp, LT, LTU);
10466 break;
10467
10468 case LE_EXPR:
10469 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10470 == MODE_INT)
10471 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10472 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10473 else
10474 comparison = compare (exp, LE, LEU);
10475 break;
10476
10477 case GT_EXPR:
10478 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10479 == MODE_INT)
10480 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10481 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10482 else
10483 comparison = compare (exp, GT, GTU);
10484 break;
10485
10486 case GE_EXPR:
10487 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10488 == MODE_INT)
10489 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10490 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10491 else
10492 comparison = compare (exp, GE, GEU);
10493 break;
10494
10495 default:
10496 normal:
10497 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10498 #if 0
10499 /* This is not needed any more and causes poor code since it causes
10500 comparisons and tests from non-SI objects to have different code
10501 sequences. */
10502 /* Copy to register to avoid generating bad insns by cse
10503 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10504 if (!cse_not_expected && GET_CODE (temp) == MEM)
10505 temp = copy_to_reg (temp);
10506 #endif
10507 do_pending_stack_adjust ();
10508 if (GET_CODE (temp) == CONST_INT)
10509 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10510 else if (GET_CODE (temp) == LABEL_REF)
10511 comparison = const_true_rtx;
10512 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10513 && !can_compare_p (GET_MODE (temp)))
10514 /* Note swapping the labels gives us not-equal. */
10515 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10516 else if (GET_MODE (temp) != VOIDmode)
10517 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10518 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10519 GET_MODE (temp), NULL_RTX, 0);
10520 else
10521 abort ();
10522 }
10523
10524 /* Do any postincrements in the expression that was tested. */
10525 emit_queue ();
10526
10527 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10528 straight into a conditional jump instruction as the jump condition.
10529 Otherwise, all the work has been done already. */
10530
10531 if (comparison == const_true_rtx)
10532 {
10533 if (if_true_label)
10534 emit_jump (if_true_label);
10535 }
10536 else if (comparison == const0_rtx)
10537 {
10538 if (if_false_label)
10539 emit_jump (if_false_label);
10540 }
10541 else if (comparison)
10542 do_jump_for_compare (comparison, if_false_label, if_true_label);
10543
10544 if (drop_through_label)
10545 {
10546 /* If do_jump produces code that might be jumped around,
10547 do any stack adjusts from that code, before the place
10548 where control merges in. */
10549 do_pending_stack_adjust ();
10550 emit_label (drop_through_label);
10551 }
10552 }
10553 \f
10554 /* Given a comparison expression EXP for values too wide to be compared
10555 with one insn, test the comparison and jump to the appropriate label.
10556 The code of EXP is ignored; we always test GT if SWAP is 0,
10557 and LT if SWAP is 1. */
10558
10559 static void
10560 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10561 tree exp;
10562 int swap;
10563 rtx if_false_label, if_true_label;
10564 {
10565 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10566 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10567 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10568 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10569 rtx drop_through_label = 0;
10570 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10571 int i;
10572
10573 if (! if_true_label || ! if_false_label)
10574 drop_through_label = gen_label_rtx ();
10575 if (! if_true_label)
10576 if_true_label = drop_through_label;
10577 if (! if_false_label)
10578 if_false_label = drop_through_label;
10579
10580 /* Compare a word at a time, high order first. */
10581 for (i = 0; i < nwords; i++)
10582 {
10583 rtx comp;
10584 rtx op0_word, op1_word;
10585
10586 if (WORDS_BIG_ENDIAN)
10587 {
10588 op0_word = operand_subword_force (op0, i, mode);
10589 op1_word = operand_subword_force (op1, i, mode);
10590 }
10591 else
10592 {
10593 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10594 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10595 }
10596
10597 /* All but the high-order word must be compared as unsigned. */
10598 comp = compare_from_rtx (op0_word, op1_word,
10599 (unsignedp || i > 0) ? GTU : GT,
10600 unsignedp, word_mode, NULL_RTX, 0);
10601 if (comp == const_true_rtx)
10602 emit_jump (if_true_label);
10603 else if (comp != const0_rtx)
10604 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10605
10606 /* Consider lower words only if these are equal. */
10607 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10608 NULL_RTX, 0);
10609 if (comp == const_true_rtx)
10610 emit_jump (if_false_label);
10611 else if (comp != const0_rtx)
10612 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10613 }
10614
10615 if (if_false_label)
10616 emit_jump (if_false_label);
10617 if (drop_through_label)
10618 emit_label (drop_through_label);
10619 }
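
/* For illustration only: a freestanding sketch of the word-at-a-time
   strategy used above, written for a hypothetical pair of two-word
   unsigned operands stored most-significant word first.  The compiler
   emits the same shape as branches: jump to the "true" label when a
   word compares greater, jump to the "false" label when it compares
   unequal, and fall through to the next (lower) word only when the
   words are equal.  */
#if 0
static int
sketch_gt_by_parts (unsigned long op0[2], unsigned long op1[2])
{
  int i;

  for (i = 0; i < 2; i++)
    {
      if (op0[i] > op1[i])      /* like the GTU branch to if_true_label */
        return 1;
      if (op0[i] != op1[i])     /* like the NE branch to if_false_label */
        return 0;
    }
  return 0;                     /* all words equal, so not greater */
}
#endif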
10620
10621 /* Compare OP0 with OP1, word at a time, in mode MODE.
10622 UNSIGNEDP says to do unsigned comparison.
10623 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10624
10625 void
10626 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10627 enum machine_mode mode;
10628 int unsignedp;
10629 rtx op0, op1;
10630 rtx if_false_label, if_true_label;
10631 {
10632 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10633 rtx drop_through_label = 0;
10634 int i;
10635
10636 if (! if_true_label || ! if_false_label)
10637 drop_through_label = gen_label_rtx ();
10638 if (! if_true_label)
10639 if_true_label = drop_through_label;
10640 if (! if_false_label)
10641 if_false_label = drop_through_label;
10642
10643 /* Compare a word at a time, high order first. */
10644 for (i = 0; i < nwords; i++)
10645 {
10646 rtx comp;
10647 rtx op0_word, op1_word;
10648
10649 if (WORDS_BIG_ENDIAN)
10650 {
10651 op0_word = operand_subword_force (op0, i, mode);
10652 op1_word = operand_subword_force (op1, i, mode);
10653 }
10654 else
10655 {
10656 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10657 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10658 }
10659
10660 /* All but the high-order word must be compared as unsigned. */
10661 comp = compare_from_rtx (op0_word, op1_word,
10662 (unsignedp || i > 0) ? GTU : GT,
10663 unsignedp, word_mode, NULL_RTX, 0);
10664 if (comp == const_true_rtx)
10665 emit_jump (if_true_label);
10666 else if (comp != const0_rtx)
10667 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10668
10669 /* Consider lower words only if these are equal. */
10670 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10671 NULL_RTX, 0);
10672 if (comp == const_true_rtx)
10673 emit_jump (if_false_label);
10674 else if (comp != const0_rtx)
10675 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10676 }
10677
10678 if (if_false_label)
10679 emit_jump (if_false_label);
10680 if (drop_through_label)
10681 emit_label (drop_through_label);
10682 }
10683
10684 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10685 with one insn, test the comparison and jump to the appropriate label. */
10686
10687 static void
10688 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10689 tree exp;
10690 rtx if_false_label, if_true_label;
10691 {
10692 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10693 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10694 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10695 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10696 int i;
10697 rtx drop_through_label = 0;
10698
10699 if (! if_false_label)
10700 drop_through_label = if_false_label = gen_label_rtx ();
10701
10702 for (i = 0; i < nwords; i++)
10703 {
10704 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10705 operand_subword_force (op1, i, mode),
10706 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10707 word_mode, NULL_RTX, 0);
10708 if (comp == const_true_rtx)
10709 emit_jump (if_false_label);
10710 else if (comp != const0_rtx)
10711 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10712 }
10713
10714 if (if_true_label)
10715 emit_jump (if_true_label);
10716 if (drop_through_label)
10717 emit_label (drop_through_label);
10718 }
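
/* For illustration only: the loop above reduces a wide equality test to
   checking each word pair and branching to the "unequal" label as soon
   as one pair differs.  A freestanding sketch, assuming hypothetical
   two-word operands:  */
#if 0
static int
sketch_eq_by_parts (unsigned long op0[2], unsigned long op1[2])
{
  int i;

  for (i = 0; i < 2; i++)
    if (op0[i] != op1[i])       /* like the branch to if_false_label */
      return 0;
  return 1;                     /* no word differed, so the values are equal */
}
#endif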
10719 \f
10720 /* Jump according to whether OP0 is 0.
10721 We assume that OP0 has an integer mode that is too wide
10722 for the available compare insns. */
10723
10724 static void
10725 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10726 rtx op0;
10727 rtx if_false_label, if_true_label;
10728 {
10729 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10730 int i;
10731 rtx drop_through_label = 0;
10732
10733 if (! if_false_label)
10734 drop_through_label = if_false_label = gen_label_rtx ();
10735
10736 for (i = 0; i < nwords; i++)
10737 {
10738 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10739 GET_MODE (op0)),
10740 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10741 if (comp == const_true_rtx)
10742 emit_jump (if_false_label);
10743 else if (comp != const0_rtx)
10744 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10745 }
10746
10747 if (if_true_label)
10748 emit_jump (if_true_label);
10749 if (drop_through_label)
10750 emit_label (drop_through_label);
10751 }
10752
10753 /* Given a comparison expression in rtl form, output conditional branches to
10754 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10755
10756 static void
10757 do_jump_for_compare (comparison, if_false_label, if_true_label)
10758 rtx comparison, if_false_label, if_true_label;
10759 {
10760 if (if_true_label)
10761 {
10762 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10763 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10764 else
10765 abort ();
10766
10767 if (if_false_label)
10768 emit_jump (if_false_label);
10769 }
10770 else if (if_false_label)
10771 {
10772 rtx insn;
10773 rtx prev = get_last_insn ();
10774 rtx branch = 0;
10775
10776 /* Output the branch with the opposite condition. Then try to invert
10777 what is generated. If more than one insn is a branch, or if the
10778 branch is not the last insn written, abort. If we can't invert
10779 the branch, make a true label, redirect this jump to it,
10780 emit a jump to the false label, and define the true label. */
10781
10782 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10783 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
10784 else
10785 abort ();
10786
10787 /* Here we get the first insn that was just emitted. It used to be the
10788 case that, on some machines, emitting the branch would discard
10789 the previous compare insn and emit a replacement. This isn't
10790 done anymore, but abort if we see that PREV is deleted. */
10791
10792 if (prev == 0)
10793 insn = get_insns ();
10794 else if (INSN_DELETED_P (prev))
10795 abort ();
10796 else
10797 insn = NEXT_INSN (prev);
10798
10799 for (; insn; insn = NEXT_INSN (insn))
10800 if (GET_CODE (insn) == JUMP_INSN)
10801 {
10802 if (branch)
10803 abort ();
10804 branch = insn;
10805 }
10806
10807 if (branch != get_last_insn ())
10808 abort ();
10809
10810 JUMP_LABEL (branch) = if_false_label;
10811 if (! invert_jump (branch, if_false_label))
10812 {
10813 if_true_label = gen_label_rtx ();
10814 redirect_jump (branch, if_true_label);
10815 emit_jump (if_false_label);
10816 emit_label (if_true_label);
10817 }
10818 }
10819 }
10820 \f
10821 /* Generate code for a comparison expression EXP
10822 (including code to compute the values to be compared)
10823 and set (CC0) according to the result.
10824 SIGNED_CODE should be the rtx operation for this comparison for
10825 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10826
10827 We force a stack adjustment unless there are currently
10828 things pushed on the stack that aren't yet used. */
10829
10830 static rtx
10831 compare (exp, signed_code, unsigned_code)
10832 register tree exp;
10833 enum rtx_code signed_code, unsigned_code;
10834 {
10835 register rtx op0
10836 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10837 register rtx op1
10838 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10839 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10840 register enum machine_mode mode = TYPE_MODE (type);
10841 int unsignedp = TREE_UNSIGNED (type);
10842 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10843
10844 #ifdef HAVE_canonicalize_funcptr_for_compare
10845 /* If function pointers need to be "canonicalized" before they can
10846 be reliably compared, then canonicalize them. */
10847 if (HAVE_canonicalize_funcptr_for_compare
10848 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10849 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10850 == FUNCTION_TYPE))
10851 {
10852 rtx new_op0 = gen_reg_rtx (mode);
10853
10854 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10855 op0 = new_op0;
10856 }
10857
10858 if (HAVE_canonicalize_funcptr_for_compare
10859 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10860 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10861 == FUNCTION_TYPE))
10862 {
10863 rtx new_op1 = gen_reg_rtx (mode);
10864
10865 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10866 op1 = new_op1;
10867 }
10868 #endif
10869
10870 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10871 ((mode == BLKmode)
10872 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10873 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10874 }
10875
10876 /* Like compare but expects the values to compare as two rtx's.
10877 The decision as to signed or unsigned comparison must be made by the caller.
10878
10879 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10880 compared.
10881
10882 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10883 size of MODE should be used. */
10884
10885 rtx
10886 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10887 register rtx op0, op1;
10888 enum rtx_code code;
10889 int unsignedp;
10890 enum machine_mode mode;
10891 rtx size;
10892 int align;
10893 {
10894 rtx tem;
10895
10896 /* If one operand is constant, make it the second one. Only do this
10897 if the other operand is not constant as well. */
10898
10899 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10900 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10901 {
10902 tem = op0;
10903 op0 = op1;
10904 op1 = tem;
10905 code = swap_condition (code);
10906 }
10907
10908 if (flag_force_mem)
10909 {
10910 op0 = force_not_mem (op0);
10911 op1 = force_not_mem (op1);
10912 }
10913
10914 do_pending_stack_adjust ();
10915
10916 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10917 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10918 return tem;
10919
10920 #if 0
10921 /* There's no need to do this now that combine.c can eliminate lots of
10922 sign extensions. This can be less efficient in certain cases on other
10923 machines. */
10924
10925 /* If this is a signed equality comparison, we can do it as an
10926 unsigned comparison since zero-extension is cheaper than sign
10927 extension and comparisons with zero are done as unsigned. This is
10928 the case even on machines that can do fast sign extension, since
10929 zero-extension is easier to combine with other operations than
10930 sign-extension is. If we are comparing against a constant, we must
10931 convert it to what it would look like unsigned. */
10932 if ((code == EQ || code == NE) && ! unsignedp
10933 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10934 {
10935 if (GET_CODE (op1) == CONST_INT
10936 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10937 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10938 unsignedp = 1;
10939 }
10940 #endif
10941
10942 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10943
10944 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10945 }
10946 \f
10947 /* Generate code to calculate EXP using a store-flag instruction
10948 and return an rtx for the result. EXP is either a comparison
10949 or a TRUTH_NOT_EXPR whose operand is a comparison.
10950
10951 If TARGET is nonzero, store the result there if convenient.
10952
10953 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10954 cheap.
10955
10956 Return zero if there is no suitable set-flag instruction
10957 available on this machine.
10958
10959 Once expand_expr has been called on the arguments of the comparison,
10960 we are committed to doing the store flag, since it is not safe to
10961 re-evaluate the expression. We emit the store-flag insn by calling
10962 emit_store_flag, but only expand the arguments if we have a reason
10963 to believe that emit_store_flag will be successful. If we think that
10964 it will, but it isn't, we have to simulate the store-flag with a
10965 set/jump/set sequence. */
10966
10967 static rtx
10968 do_store_flag (exp, target, mode, only_cheap)
10969 tree exp;
10970 rtx target;
10971 enum machine_mode mode;
10972 int only_cheap;
10973 {
10974 enum rtx_code code;
10975 tree arg0, arg1, type;
10976 tree tem;
10977 enum machine_mode operand_mode;
10978 int invert = 0;
10979 int unsignedp;
10980 rtx op0, op1;
10981 enum insn_code icode;
10982 rtx subtarget = target;
10983 rtx result, label, pattern, jump_pat;
10984
10985 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10986 result at the end. We can't simply invert the test since it would
10987 have already been inverted if it were valid. This case occurs for
10988 some floating-point comparisons. */
10989
10990 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10991 invert = 1, exp = TREE_OPERAND (exp, 0);
10992
10993 arg0 = TREE_OPERAND (exp, 0);
10994 arg1 = TREE_OPERAND (exp, 1);
10995 type = TREE_TYPE (arg0);
10996 operand_mode = TYPE_MODE (type);
10997 unsignedp = TREE_UNSIGNED (type);
10998
10999 /* We won't bother with BLKmode store-flag operations because it would mean
11000 passing a lot of information to emit_store_flag. */
11001 if (operand_mode == BLKmode)
11002 return 0;
11003
11004 /* We won't bother with store-flag operations involving function pointers
11005 when function pointers must be canonicalized before comparisons. */
11006 #ifdef HAVE_canonicalize_funcptr_for_compare
11007 if (HAVE_canonicalize_funcptr_for_compare
11008 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11009 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11010 == FUNCTION_TYPE))
11011 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11012 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11013 == FUNCTION_TYPE))))
11014 return 0;
11015 #endif
11016
11017 STRIP_NOPS (arg0);
11018 STRIP_NOPS (arg1);
11019
11020 /* Get the rtx comparison code to use. We know that EXP is a comparison
11021 operation of some type. Some comparisons against 1 and -1 can be
11022 converted to comparisons with zero. Do so here so that the tests
11023 below will be aware that we have a comparison with zero. These
11024 tests will not catch constants in the first operand, but constants
11025 are rarely passed as the first operand. */
11026
11027 switch (TREE_CODE (exp))
11028 {
11029 case EQ_EXPR:
11030 code = EQ;
11031 break;
11032 case NE_EXPR:
11033 code = NE;
11034 break;
11035 case LT_EXPR:
11036 if (integer_onep (arg1))
11037 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11038 else
11039 code = unsignedp ? LTU : LT;
11040 break;
11041 case LE_EXPR:
11042 if (! unsignedp && integer_all_onesp (arg1))
11043 arg1 = integer_zero_node, code = LT;
11044 else
11045 code = unsignedp ? LEU : LE;
11046 break;
11047 case GT_EXPR:
11048 if (! unsignedp && integer_all_onesp (arg1))
11049 arg1 = integer_zero_node, code = GE;
11050 else
11051 code = unsignedp ? GTU : GT;
11052 break;
11053 case GE_EXPR:
11054 if (integer_onep (arg1))
11055 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11056 else
11057 code = unsignedp ? GEU : GE;
11058 break;
11059 default:
11060 abort ();
11061 }
11062
11063 /* Put a constant second. */
11064 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11065 {
11066 tem = arg0; arg0 = arg1; arg1 = tem;
11067 code = swap_condition (code);
11068 }
11069
11070 /* If this is an equality or inequality test of a single bit, we can
11071 do this by shifting the bit being tested to the low-order bit and
11072 masking the result with the constant 1. If the condition was EQ,
11073 we xor it with 1. This does not require an scc insn and is faster
11074 than an scc insn even if we have it. */
11075
11076 if ((code == NE || code == EQ)
11077 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11078 && integer_pow2p (TREE_OPERAND (arg0, 1))
11079 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11080 {
11081 tree inner = TREE_OPERAND (arg0, 0);
11082 HOST_WIDE_INT tem;
11083 int bitnum;
11084 int ops_unsignedp;
11085
11086 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11087 NULL_RTX, VOIDmode, 0));
11088 /* In this case, immed_double_const will sign extend the value to make
11089 it look the same on the host and target. We must remove the
11090 sign-extension before calling exact_log2, since exact_log2 will
11091 fail for negative values. */
11092 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11093 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11094 /* We don't use the obvious constant shift to generate the mask,
11095 because that generates compiler warnings when BITS_PER_WORD is
11096 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11097 code is unreachable in that case. */
11098 tem = tem & GET_MODE_MASK (word_mode);
11099 bitnum = exact_log2 (tem);
11100
11101 /* If INNER is a right shift of a constant and it plus BITNUM does
11102 not overflow, adjust BITNUM and INNER. */
11103
11104 if (TREE_CODE (inner) == RSHIFT_EXPR
11105 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11106 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11107 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11108 < TYPE_PRECISION (type)))
11109 {
11110 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11111 inner = TREE_OPERAND (inner, 0);
11112 }
11113
11114 /* If we are going to be able to omit the AND below, we must do our
11115 operations as unsigned. If we must use the AND, we have a choice.
11116 Normally unsigned is faster, but for some machines signed is. */
11117 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11118 #ifdef LOAD_EXTEND_OP
11119 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11120 #else
11121 : 1
11122 #endif
11123 );
11124
11125 if (subtarget == 0 || GET_CODE (subtarget) != REG
11126 || GET_MODE (subtarget) != operand_mode
11127 || ! safe_from_p (subtarget, inner))
11128 subtarget = 0;
11129
11130 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11131
11132 if (bitnum != 0)
11133 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11134 size_int (bitnum), subtarget, ops_unsignedp);
11135
11136 if (GET_MODE (op0) != mode)
11137 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11138
11139 if ((code == EQ && ! invert) || (code == NE && invert))
11140 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11141 ops_unsignedp, OPTAB_LIB_WIDEN);
11142
11143 /* Put the AND last so it can combine with more things. */
11144 if (bitnum != TYPE_PRECISION (type) - 1)
11145 op0 = expand_and (op0, const1_rtx, subtarget);
11146
11147 return op0;
11148 }
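
  /* For illustration, with hypothetical values: (x & 0x10) != 0 becomes
     (x >> 4) & 1; the EQ form (x & 0x10) == 0 becomes ((x >> 4) ^ 1) & 1;
     and when the operand is itself a right shift, ((x >> 2) & 0x10) != 0
     is handled by testing bit 4 + 2 = 6 of x directly.  */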
11149
11150 /* Now see if we are likely to be able to do this. Return if not. */
11151 if (! can_compare_p (operand_mode))
11152 return 0;
11153 icode = setcc_gen_code[(int) code];
11154 if (icode == CODE_FOR_nothing
11155 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11156 {
11157 /* We can only do this if it is one of the special cases that
11158 can be handled without an scc insn. */
11159 if ((code == LT && integer_zerop (arg1))
11160 || (! only_cheap && code == GE && integer_zerop (arg1)))
11161 ;
11162 else if (BRANCH_COST >= 0
11163 && ! only_cheap && (code == NE || code == EQ)
11164 && TREE_CODE (type) != REAL_TYPE
11165 && ((abs_optab->handlers[(int) operand_mode].insn_code
11166 != CODE_FOR_nothing)
11167 || (ffs_optab->handlers[(int) operand_mode].insn_code
11168 != CODE_FOR_nothing)))
11169 ;
11170 else
11171 return 0;
11172 }
11173
11174 preexpand_calls (exp);
11175 if (subtarget == 0 || GET_CODE (subtarget) != REG
11176 || GET_MODE (subtarget) != operand_mode
11177 || ! safe_from_p (subtarget, arg1))
11178 subtarget = 0;
11179
11180 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11181 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11182
11183 if (target == 0)
11184 target = gen_reg_rtx (mode);
11185
11186 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11187 because, if the emit_store_flag does anything it will succeed and
11188 OP0 and OP1 will not be used subsequently. */
11189
11190 result = emit_store_flag (target, code,
11191 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11192 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11193 operand_mode, unsignedp, 1);
11194
11195 if (result)
11196 {
11197 if (invert)
11198 result = expand_binop (mode, xor_optab, result, const1_rtx,
11199 result, 0, OPTAB_LIB_WIDEN);
11200 return result;
11201 }
11202
11203 /* If this failed, we have to do this with set/compare/jump/set code. */
11204 if (GET_CODE (target) != REG
11205 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11206 target = gen_reg_rtx (GET_MODE (target));
11207
11208 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11209 result = compare_from_rtx (op0, op1, code, unsignedp,
11210 operand_mode, NULL_RTX, 0);
11211 if (GET_CODE (result) == CONST_INT)
11212 return (((result == const0_rtx && ! invert)
11213 || (result != const0_rtx && invert))
11214 ? const0_rtx : const1_rtx);
11215
11216 label = gen_label_rtx ();
11217 if (bcc_gen_fctn[(int) code] == 0)
11218 abort ();
11219
11220 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11221 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11222 emit_label (label);
11223
11224 return target;
11225 }
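
/* For illustration only: a freestanding sketch of the set/jump/set
   fallback above, using equality as a hypothetical condition.  The flag
   register is preset to the "condition holds" value and overwritten by a
   second move that the conditional branch jumps around.  */
#if 0
static int
sketch_store_flag_fallback (int op0, int op1)
{
  int target = 1;               /* emit_move_insn (target, const1_rtx) */
  if (! (op0 == op1))           /* the branch skips the store below */
    target = 0;                 /* emit_move_insn (target, const0_rtx) */
  return target;
}
#endif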
11226 \f
11227 /* Generate a tablejump instruction (used for switch statements). */
11228
11229 #ifdef HAVE_tablejump
11230
11231 /* INDEX is the value being switched on, with the lowest value
11232 in the table already subtracted.
11233 MODE is its expected mode (needed if INDEX is constant).
11234 RANGE is the length of the jump table.
11235 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11236
11237 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11238 index value is out of range. */
11239
11240 void
11241 do_tablejump (index, mode, range, table_label, default_label)
11242 rtx index, range, table_label, default_label;
11243 enum machine_mode mode;
11244 {
11245 register rtx temp, vector;
11246
11247 /* Do an unsigned comparison (in the proper mode) between the index
11248 expression and the value which represents the length of the range.
11249 Since we just finished subtracting the lower bound of the range
11250 from the index expression, this comparison allows us to simultaneously
11251 check that the original index expression value is both greater than
11252 or equal to the minimum value of the range and less than or equal to
11253 the maximum value of the range. */
11254
11255 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11256 emit_jump_insn (gen_bgtu (default_label));
11257
11258 /* If index is in range, it must fit in Pmode.
11259 Convert to Pmode so we can index with it. */
11260 if (mode != Pmode)
11261 index = convert_to_mode (Pmode, index, 1);
11262
11263 /* Don't let a MEM slip through, because then the INDEX that comes
11264 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11265 and break_out_memory_refs will go to work on it and mess it up. */
11266 #ifdef PIC_CASE_VECTOR_ADDRESS
11267 if (flag_pic && GET_CODE (index) != REG)
11268 index = copy_to_mode_reg (Pmode, index);
11269 #endif
11270
11271 /* If flag_force_addr were to affect this address
11272 it could interfere with the tricky assumptions made
11273 about addresses that contain label-refs,
11274 which may be valid only very near the tablejump itself. */
11275 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11276 GET_MODE_SIZE, because this indicates how large insns are. The other
11277 uses should all be Pmode, because they are addresses. This code
11278 could fail if addresses and insns are not the same size. */
11279 index = gen_rtx (PLUS, Pmode,
11280 gen_rtx (MULT, Pmode, index,
11281 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11282 gen_rtx (LABEL_REF, Pmode, table_label));
11283 #ifdef PIC_CASE_VECTOR_ADDRESS
11284 if (flag_pic)
11285 index = PIC_CASE_VECTOR_ADDRESS (index);
11286 else
11287 #endif
11288 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11289 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11290 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11291 RTX_UNCHANGING_P (vector) = 1;
11292 convert_move (temp, vector, 0);
11293
11294 emit_jump_insn (gen_tablejump (temp, table_label));
11295
11296 #ifndef CASE_VECTOR_PC_RELATIVE
11297 /* If we are generating PIC code or if the table is PC-relative, the
11298 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11299 if (! flag_pic)
11300 emit_barrier ();
11301 #endif
11302 }
11303
11304 #endif /* HAVE_tablejump */
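
/* For illustration only: a freestanding sketch of the single unsigned
   range check performed by do_tablejump, with hypothetical case values
   LOW .. HIGH.  Because the caller has already subtracted the lower
   bound, an original index below LOW wraps around to a huge unsigned
   value, so one GTU test also rejects values below the minimum.  */
#if 0
static int
sketch_in_case_range (unsigned int index, unsigned int low, unsigned int high)
{
  /* Same test as the GTU branch: out of range iff
     index - low > high - low in unsigned arithmetic.  */
  return (index - low) <= (high - low);
}
#endif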
11305
11306
11307 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11308 to that value is on the top of the stack. The resulting type is TYPE, and
11309 the source declaration is DECL. */
11310
11311 void
11312 bc_load_memory (type, decl)
11313 tree type, decl;
11314 {
11315 enum bytecode_opcode opcode;
11316
11317
11318 /* Bit fields are special. We only know about signed and
11319 unsigned ints, and enums. The latter are treated as
11320 signed integers. */
11321
11322 if (DECL_BIT_FIELD (decl))
11323 if (TREE_CODE (type) == ENUMERAL_TYPE
11324 || TREE_CODE (type) == INTEGER_TYPE)
11325 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11326 else
11327 abort ();
11328 else
11329 /* See corresponding comment in bc_store_memory(). */
11330 if (TYPE_MODE (type) == BLKmode
11331 || TYPE_MODE (type) == VOIDmode)
11332 return;
11333 else
11334 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11335
11336 if (opcode == neverneverland)
11337 abort ();
11338
11339 bc_emit_bytecode (opcode);
11340
11341 #ifdef DEBUG_PRINT_CODE
11342 fputc ('\n', stderr);
11343 #endif
11344 }
11345
11346
11347 /* Store the contents of the second stack slot to the address in the
11348 top stack slot. DECL is the declaration of the destination and is used
11349 to determine whether we're dealing with a bitfield. */
11350
11351 void
11352 bc_store_memory (type, decl)
11353 tree type, decl;
11354 {
11355 enum bytecode_opcode opcode;
11356
11357
11358 if (DECL_BIT_FIELD (decl))
11359 {
11360 if (TREE_CODE (type) == ENUMERAL_TYPE
11361 || TREE_CODE (type) == INTEGER_TYPE)
11362 opcode = sstoreBI;
11363 else
11364 abort ();
11365 }
11366 else
11367 if (TYPE_MODE (type) == BLKmode)
11368 {
11369 /* Copy structure. This expands to a block copy instruction, storeBLK.
11370 In addition to the arguments expected by the other store instructions,
11371 it also expects a type size (SImode) on top of the stack, which is the
11372 structure size in size units (usually bytes). The first two arguments
11373 are already on the stack, so we just put the size on level 1. For some
11374 other languages the size may be variable; this is why we don't encode
11375 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11376
11377 bc_expand_expr (TYPE_SIZE (type));
11378 opcode = storeBLK;
11379 }
11380 else
11381 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11382
11383 if (opcode == neverneverland)
11384 abort ();
11385
11386 bc_emit_bytecode (opcode);
11387
11388 #ifdef DEBUG_PRINT_CODE
11389 fputc ('\n', stderr);
11390 #endif
11391 }
11392
11393
11394 /* Allocate local stack space sufficient to hold a value of the given
11395 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11396 integral power of 2. A special case is locals of type VOID, which
11397 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
11398 remapped into the corresponding attribute of SI. */
11399
11400 rtx
11401 bc_allocate_local (size, alignment)
11402 int size, alignment;
11403 {
11404 rtx retval;
11405 int byte_alignment;
11406
11407 if (size < 0)
11408 abort ();
11409
11410 /* Normalize size and alignment */
11411 if (!size)
11412 size = UNITS_PER_WORD;
11413
11414 if (alignment < BITS_PER_UNIT)
11415 byte_alignment = 1 << (INT_ALIGN - 1);
11416 else
11417 /* Align */
11418 byte_alignment = alignment / BITS_PER_UNIT;
11419
11420 if (local_vars_size & (byte_alignment - 1))
11421 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11422
11423 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11424 local_vars_size += size;
11425
11426 return retval;
11427 }
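
/* For illustration only: a freestanding sketch of the rounding above,
   which advances the running offset to the next multiple of the byte
   alignment before a slot is carved out.  With hypothetical values,
   offset 5 and alignment 4 give a slot starting at offset 8.  */
#if 0
static int
sketch_align_up (int offset, int byte_alignment)
{
  if (offset & (byte_alignment - 1))
    offset += byte_alignment - (offset & (byte_alignment - 1));
  return offset;
}
#endif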
11428
11429
11430 /* Allocate variable-sized local array. Variable-sized arrays are
11431 actually pointers to the address in memory where they are stored. */
11432
11433 rtx
11434 bc_allocate_variable_array (size)
11435 tree size;
11436 {
11437 rtx retval;
11438 const int ptralign = (1 << (PTR_ALIGN - 1));
11439
11440 /* Align pointer */
11441 if (local_vars_size & ptralign)
11442 local_vars_size += ptralign - (local_vars_size & ptralign);
11443
11444 /* Note down local space needed: pointer to block; also return
11445 dummy rtx */
11446
11447 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11448 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11449 return retval;
11450 }
11451
11452
11453 /* Push the machine address for the given external variable offset. */
11454
11455 void
11456 bc_load_externaddr (externaddr)
11457 rtx externaddr;
11458 {
11459 bc_emit_bytecode (constP);
11460 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11461 BYTECODE_BC_LABEL (externaddr)->offset);
11462
11463 #ifdef DEBUG_PRINT_CODE
11464 fputc ('\n', stderr);
11465 #endif
11466 }
11467
11468
11469 /* Like above, but expects an IDENTIFIER. */
11470
11471 void
11472 bc_load_externaddr_id (id, offset)
11473 tree id;
11474 int offset;
11475 {
11476 if (!IDENTIFIER_POINTER (id))
11477 abort ();
11478
11479 bc_emit_bytecode (constP);
11480 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11481
11482 #ifdef DEBUG_PRINT_CODE
11483 fputc ('\n', stderr);
11484 #endif
11485 }
11486
11487
11488 /* Push the machine address for the given local variable offset. */
11489
11490 void
11491 bc_load_localaddr (localaddr)
11492 rtx localaddr;
11493 {
11494 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11495 }
11496
11497
11498 /* Push the machine address for the given parameter offset.
11499 NOTE: offset is in bits. */
11500
11501 void
11502 bc_load_parmaddr (parmaddr)
11503 rtx parmaddr;
11504 {
11505 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11506 / BITS_PER_UNIT));
11507 }
11508
11509
11510 /* Convert a[i] into *(a + i). */
11511
11512 tree
11513 bc_canonicalize_array_ref (exp)
11514 tree exp;
11515 {
11516 tree type = TREE_TYPE (exp);
11517 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11518 TREE_OPERAND (exp, 0));
11519 tree index = TREE_OPERAND (exp, 1);
11520
11521
11522 /* Convert the integer argument to a type the same size as a pointer
11523 so the multiply won't overflow spuriously. */
11524
11525 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11526 index = convert (type_for_size (POINTER_SIZE, 0), index);
11527
11528 /* The array address isn't volatile even if the array is.
11529 (Of course this isn't terribly relevant since the bytecode
11530 translator treats nearly everything as volatile anyway.) */
11531 TREE_THIS_VOLATILE (array_adr) = 0;
11532
11533 return build1 (INDIRECT_REF, type,
11534 fold (build (PLUS_EXPR,
11535 TYPE_POINTER_TO (type),
11536 array_adr,
11537 fold (build (MULT_EXPR,
11538 TYPE_POINTER_TO (type),
11539 index,
11540 size_in_bytes (type))))));
11541 }
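
/* For illustration only: a freestanding sketch of the rewrite above,
   which turns A[I] into *(&A + I * sizeof (element)) with I widened to
   pointer width first so the multiplication cannot overflow spuriously.
   The element type and operands here are hypothetical.  */
#if 0
static int
sketch_array_ref (int *a, unsigned long i)
{
  /* Byte-offset form of INDIRECT_REF (PLUS (addr, MULT (index, size))).  */
  return *(int *) ((char *) a + i * sizeof (int));
}
#endif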
11542
11543
11544 /* Load the address of the component referenced by the given
11545 COMPONENT_REF expression.
11546
11547 Returns innermost lvalue. */
11548
11549 tree
11550 bc_expand_component_address (exp)
11551 tree exp;
11552 {
11553 tree tem, chain;
11554 enum machine_mode mode;
11555 int bitpos = 0;
11556 HOST_WIDE_INT SIval;
11557
11558
11559 tem = TREE_OPERAND (exp, 1);
11560 mode = DECL_MODE (tem);
11561
11562
11563 /* Compute cumulative bit offset for nested component refs
11564 and array refs, and find the ultimate containing object. */
11565
11566 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11567 {
11568 if (TREE_CODE (tem) == COMPONENT_REF)
11569 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11570 else
11571 if (TREE_CODE (tem) == ARRAY_REF
11572 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11573 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11574
11575 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11576 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11577 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11578 else
11579 break;
11580 }
11581
11582 bc_expand_expr (tem);
11583
11584
11585 /* For bitfields also push their offset and size */
11586 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11587 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11588 else
11589 if (SIval = bitpos / BITS_PER_UNIT)
11590 bc_emit_instruction (addconstPSI, SIval);
11591
11592 return (TREE_OPERAND (exp, 1));
11593 }
11594
11595
11596 /* Emit code to push two SI constants */
11597
11598 void
11599 bc_push_offset_and_size (offset, size)
11600 HOST_WIDE_INT offset, size;
11601 {
11602 bc_emit_instruction (constSI, offset);
11603 bc_emit_instruction (constSI, size);
11604 }
11605
11606
11607 /* Emit byte code to push the address of the given lvalue expression to
11608 the stack. If it's a bit field, we also push offset and size info.
11609
11610 Returns innermost component, which allows us to determine not only
11611 its type, but also whether it's a bitfield. */
11612
11613 tree
11614 bc_expand_address (exp)
11615 tree exp;
11616 {
11617 /* Safeguard */
11618 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11619 return (exp);
11620
11621
11622 switch (TREE_CODE (exp))
11623 {
11624 case ARRAY_REF:
11625
11626 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11627
11628 case COMPONENT_REF:
11629
11630 return (bc_expand_component_address (exp));
11631
11632 case INDIRECT_REF:
11633
11634 bc_expand_expr (TREE_OPERAND (exp, 0));
11635
11636 /* For variable-sized types: retrieve pointer. Sometimes the
11637 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11638 also make sure we have an operand, just in case... */
11639
11640 if (TREE_OPERAND (exp, 0)
11641 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11642 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11643 bc_emit_instruction (loadP);
11644
11645 /* If packed, also return offset and size */
11646 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11647
11648 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11649 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11650
11651 return (TREE_OPERAND (exp, 0));
11652
11653 case FUNCTION_DECL:
11654
11655 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11656 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11657 break;
11658
11659 case PARM_DECL:
11660
11661 bc_load_parmaddr (DECL_RTL (exp));
11662
11663 /* For variable-sized types: retrieve pointer */
11664 if (TYPE_SIZE (TREE_TYPE (exp))
11665 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11666 bc_emit_instruction (loadP);
11667
11668 /* If packed, also return offset and size */
11669 if (DECL_BIT_FIELD (exp))
11670 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11671 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11672
11673 break;
11674
11675 case RESULT_DECL:
11676
11677 bc_emit_instruction (returnP);
11678 break;
11679
11680 case VAR_DECL:
11681
11682 #if 0
11683 if (BYTECODE_LABEL (DECL_RTL (exp)))
11684 bc_load_externaddr (DECL_RTL (exp));
11685 #endif
11686
11687 if (DECL_EXTERNAL (exp))
11688 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11689 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11690 else
11691 bc_load_localaddr (DECL_RTL (exp));
11692
11693 /* For variable-sized types: retrieve pointer */
11694 if (TYPE_SIZE (TREE_TYPE (exp))
11695 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11696 bc_emit_instruction (loadP);
11697
11698 /* If packed, also return offset and size */
11699 if (DECL_BIT_FIELD (exp))
11700 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11701 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11702
11703 break;
11704
11705 case STRING_CST:
11706 {
11707 rtx r;
11708
11709 bc_emit_bytecode (constP);
11710 r = output_constant_def (exp);
11711 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11712
11713 #ifdef DEBUG_PRINT_CODE
11714 fputc ('\n', stderr);
11715 #endif
11716 }
11717 break;
11718
11719 default:
11720
11721 abort ();
11722 break;
11723 }
11724
11725 /* Most lvalues don't have components. */
11726 return (exp);
11727 }
11728
11729
11730 /* Emit a type code to be used by the runtime support in handling
11731 parameter passing. The type code consists of the machine mode
11732 plus the minimal alignment shifted left 8 bits. */
11733
11734 tree
11735 bc_runtime_type_code (type)
11736 tree type;
11737 {
11738 int val;
11739
11740 switch (TREE_CODE (type))
11741 {
11742 case VOID_TYPE:
11743 case INTEGER_TYPE:
11744 case REAL_TYPE:
11745 case COMPLEX_TYPE:
11746 case ENUMERAL_TYPE:
11747 case POINTER_TYPE:
11748 case RECORD_TYPE:
11749
11750 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11751 break;
11752
11753 case ERROR_MARK:
11754
11755 val = 0;
11756 break;
11757
11758 default:
11759
11760 abort ();
11761 }
11762 return build_int_2 (val, 0);
11763 }
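
/* For illustration only: a freestanding sketch of the encoding above,
   assuming the machine mode value fits in the low eight bits.  A
   hypothetical mode value of 4 with 32-bit alignment encodes as
   4 | (32 << 8) == 0x2004.  */
#if 0
static int
sketch_runtime_type_code (int mode, int align_in_bits)
{
  return mode | (align_in_bits << 8);
}
#endif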
11764
11765
11766 /* Generate constructor label */
11767
11768 char *
11769 bc_gen_constr_label ()
11770 {
11771 static int label_counter;
11772 static char label[20];
11773
11774 sprintf (label, "*LR%d", label_counter++);
11775
11776 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11777 }
11778
11779
11780 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11781 expand the constructor data as static data, and push a pointer to it.
11782 The pointer is put in the pointer table and is retrieved by a constP
11783 bytecode instruction. We then loop and store each constructor member in
11784 the corresponding component. Finally, we return the original pointer on
11785 the stack. */
11786
11787 void
11788 bc_expand_constructor (constr)
11789 tree constr;
11790 {
11791 char *l;
11792 HOST_WIDE_INT ptroffs;
11793 rtx constr_rtx;
11794
11795
11796 /* Literal constructors are handled as constants, whereas
11797 non-literals are evaluated and stored element by element
11798 into the data segment. */
11799
11800 /* Allocate space in proper segment and push pointer to space on stack.
11801 */
11802
11803 l = bc_gen_constr_label ();
11804
11805 if (TREE_CONSTANT (constr))
11806 {
11807 text_section ();
11808
11809 bc_emit_const_labeldef (l);
11810 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11811 }
11812 else
11813 {
11814 data_section ();
11815
11816 bc_emit_data_labeldef (l);
11817 bc_output_data_constructor (constr);
11818 }
11819
11820
11821 /* Add reference to pointer table and recall pointer to stack;
11822 this code is common for both types of constructors: literals
11823 and non-literals. */
11824
11825 ptroffs = bc_define_pointer (l);
11826 bc_emit_instruction (constP, ptroffs);
11827
11828 /* This is all that has to be done if it's a literal. */
11829 if (TREE_CONSTANT (constr))
11830 return;
11831
11832
11833 /* At this point, we have the pointer to the structure on top of the stack.
11834 Generate sequences of store_memory calls for the constructor. */
11835
11836 /* constructor type is structure */
11837 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11838 {
11839 register tree elt;
11840
11841 /* If the constructor has fewer fields than the structure,
11842 clear the whole structure first. */
11843
11844 if (list_length (CONSTRUCTOR_ELTS (constr))
11845 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11846 {
11847 bc_emit_instruction (duplicate);
11848 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11849 bc_emit_instruction (clearBLK);
11850 }
11851
11852 /* Store each element of the constructor into the corresponding
11853 field of TARGET. */
11854
11855 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11856 {
11857 register tree field = TREE_PURPOSE (elt);
11858 register enum machine_mode mode;
11859 int bitsize;
11860 int bitpos;
11861 int unsignedp;
11862
11863 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11864 mode = DECL_MODE (field);
11865 unsignedp = TREE_UNSIGNED (field);
11866
11867 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11868
11869 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11870 /* The alignment of TARGET is
11871 at least what its type requires. */
11872 VOIDmode, 0,
11873 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11874 int_size_in_bytes (TREE_TYPE (constr)));
11875 }
11876 }
11877 else
11878
11879 /* Constructor type is array */
11880 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11881 {
11882 register tree elt;
11883 register int i;
11884 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11885 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11886 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11887 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11888
11889 /* If the constructor has fewer elements than the array,
11890 clear the whole array first. */
11891
11892 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11893 {
11894 bc_emit_instruction (duplicate);
11895 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11896 bc_emit_instruction (clearBLK);
11897 }
11898
11899
11900 /* Store each element of the constructor into the corresponding
11901 element of TARGET, determined by counting the elements. */
11902
11903 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11904 elt;
11905 elt = TREE_CHAIN (elt), i++)
11906 {
11907 register enum machine_mode mode;
11908 int bitsize;
11909 int bitpos;
11910 int unsignedp;
11911
11912 mode = TYPE_MODE (elttype);
11913 bitsize = GET_MODE_BITSIZE (mode);
11914 unsignedp = TREE_UNSIGNED (elttype);
11915
11916 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11917 /* * TYPE_SIZE_UNIT (elttype) */ );
11918
11919 bc_store_field (elt, bitsize, bitpos, mode,
11920 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11921 /* The alignment of TARGET is
11922 at least what its type requires. */
11923 VOIDmode, 0,
11924 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11925 int_size_in_bytes (TREE_TYPE (constr)));
11926 }
11927
11928 }
11929 }
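
/* For illustration only: when a constructor names fewer members than the
   type has, the code above clears the whole object first and then stores
   the listed members one by one.  A freestanding sketch with a
   hypothetical structure type:  */
#if 0
struct sketch_point { int x, y, z; };

static struct sketch_point
sketch_partial_constructor (int x, int y)
{
  struct sketch_point p = { 0 };        /* like the clearBLK instruction */
  p.x = x;                              /* member-by-member stores */
  p.y = y;
  return p;
}
#endif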
11930
11931
11932 /* Store the value of EXP (an expression tree) into member FIELD of
11933 structure at address on stack, which has type TYPE, mode MODE and
11934 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11935 structure.
11936
11937 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11938 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11939
11940 void
11941 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11942 value_mode, unsignedp, align, total_size)
11943 int bitsize, bitpos;
11944 enum machine_mode mode;
11945 tree field, exp, type;
11946 enum machine_mode value_mode;
11947 int unsignedp;
11948 int align;
11949 int total_size;
11950 {
11951
11952 /* Expand expression and copy pointer */
11953 bc_expand_expr (exp);
11954 bc_emit_instruction (over);
11955
11956
11957 /* If the component is a bit field, we cannot use addressing to access
11958 it. Use bit-field techniques to store in it. */
11959
11960 if (DECL_BIT_FIELD (field))
11961 {
11962 bc_store_bit_field (bitpos, bitsize, unsignedp);
11963 return;
11964 }
11965 else
11966 /* Not bit field */
11967 {
11968 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11969
11970 /* Advance pointer to the desired member */
11971 if (offset)
11972 bc_emit_instruction (addconstPSI, offset);
11973
11974 /* Store */
11975 bc_store_memory (type, field);
11976 }
11977 }
11978
11979
11980 /* Store SI/SU in bitfield */
11981
11982 void
11983 bc_store_bit_field (offset, size, unsignedp)
11984 int offset, size, unsignedp;
11985 {
11986 /* Push bitfield offset and size */
11987 bc_push_offset_and_size (offset, size);
11988
11989 /* Store */
11990 bc_emit_instruction (sstoreBI);
11991 }
11992
11993
11994 /* Load SI/SU from bitfield */
11995
11996 void
11997 bc_load_bit_field (offset, size, unsignedp)
11998 int offset, size, unsignedp;
11999 {
12000 /* Push bitfield offset and size */
12001 bc_push_offset_and_size (offset, size);
12002
12003 /* Load: sign-extend if signed, else zero-extend */
12004 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
12005 }
12006
12007
12008 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
12009 (adjust stack pointer upwards), negative means add that number of
12010 levels (adjust the stack pointer downwards). Only positive values
12011 normally make sense. */
12012
12013 void
12014 bc_adjust_stack (nlevels)
12015 int nlevels;
12016 {
12017 switch (nlevels)
12018 {
12019 case 0:
12020 break;
12021
12022 case 2:
12023 bc_emit_instruction (drop);
12024
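/* Fall through so that dropping two levels emits a second drop.  */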
12025 case 1:
12026 bc_emit_instruction (drop);
12027 break;
12028
12029 default:
12030
12031 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12032 stack_depth -= nlevels;
12033 }
12034
12035 #if defined (VALIDATE_STACK_FOR_BC)
12036 VALIDATE_STACK_FOR_BC ();
12037 #endif
12038 }