1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "machmode.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "obstack.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
34 #include "expr.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "typeclass.h"
39
40 #include "bytecode.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
43 #include "bc-optab.h"
44 #include "bc-emit.h"
45
46
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
81
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
86
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
90
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
96
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
100
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
104 of TARGET_EXPRs. */
105 int target_temp_slot_level;
106
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
109 returned. */
110 static rtx saveregs_value;
111
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
114
115 /* This structure is used by move_by_pieces to describe the move to
116 be performed. */
117
118 struct move_by_pieces
119 {
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
124 int to_struct;
125 rtx from;
126 rtx from_addr;
127 int autinc_from;
128 int explicit_inc_from;
129 int from_struct;
130 int len;
131 int offset;
132 int reverse;
133 };
134
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138 struct clear_by_pieces
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 int to_struct;
145 int len;
146 int offset;
147 int reverse;
148 };
149
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
153
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
159
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static rtx var_rtx PROTO((tree));
181 static int get_pointer_alignment PROTO((tree, unsigned));
182 static tree string_constant PROTO((tree, tree *));
183 static tree c_strlen PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 void bc_expand_increment PROTO((struct increment_operator *, tree));
194 rtx bc_allocate_local PROTO((int, int));
195 void bc_store_memory PROTO((tree, tree));
196 tree bc_expand_component_address PROTO((tree));
197 tree bc_expand_address PROTO((tree));
198 void bc_expand_constructor PROTO((tree));
199 void bc_adjust_stack PROTO((int));
200 tree bc_canonicalize_array_ref PROTO((tree));
201 void bc_load_memory PROTO((tree, tree));
202 void bc_load_externaddr PROTO((rtx));
203 void bc_load_externaddr_id PROTO((tree, int));
204 void bc_load_localaddr PROTO((rtx));
205 void bc_load_parmaddr PROTO((rtx));
206 static void preexpand_calls PROTO((tree));
207 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
208 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
209 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
214 static tree defer_cleanups_to PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
216
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
220
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
223
224 /* MOVE_RATIO is the number of move instructions that is better than
225 a block move. */
226
227 #ifndef MOVE_RATIO
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
229 #define MOVE_RATIO 2
230 #else
231 /* A value of around 6 would minimize code size; infinity would minimize
232 execution time. */
233 #define MOVE_RATIO 15
234 #endif
235 #endif
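/* Illustrative sketch, not part of the original file: MOVE_RATIO is
   consulted by emit_block_move (later in this file), which compares
   move_by_pieces_ninsns against it to choose between inline scalar
   moves and a block-move insn or library call.  Roughly:  */
#if 0
if (GET_CODE (size) == CONST_INT
    && move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO)
  move_by_pieces (x, y, INTVAL (size), align);  /* inline scalar moves */
else
  ;  /* fall back to a movstr pattern or a memcpy/bcopy library call */
#endif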
236
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
247 #endif
248
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
252 #endif
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
255 #endif
256 \f
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261
262 /* Initialize maps used to convert modes to const, load, and store
263 bytecodes. */
264
265 void
266 bc_init_mode_to_opcode_maps ()
267 {
268 int mode;
269
270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
274
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
279
280 #include "modemap.def"
281 #undef DEF_MODEMAP
282 }
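/* Illustrative sketch, not part of the original file: each DEF_MODEMAP
   line in modemap.def expands to the three assignments above.  A
   hypothetical entry (the real opcode names live in bc-opcode.h):

       DEF_MODEMAP (SImode, CODE, UCODE, constSI, loadSI, storeSI)

   would leave mode_to_const_map[(int) SImode] == constSI, and likewise
   for the load and store maps.  */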
283 \f
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
286
287 void
288 init_expr_once ()
289 {
290 rtx insn, pat;
291 enum machine_mode mode;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
297
298 start_sequence ();
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
301
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
304 {
305 int regno;
306 rtx reg;
307 int num_clobbers;
308
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
311 PUT_MODE (mem1, mode);
312
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
315
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
319 regno++)
320 {
321 if (! HARD_REGNO_MODE_OK (regno, mode))
322 continue;
323
324 reg = gen_rtx (REG, mode, regno);
325
326 SET_SRC (pat) = mem;
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
330
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
335
336 SET_SRC (pat) = reg;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
340
341 SET_SRC (pat) = reg;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
345 }
346 }
347
348 end_sequence ();
349 }
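/* Illustrative sketch, not part of the original file: the direct_load
   and direct_store flags probed above are consulted before gen_lowpart
   is applied to a MEM.  The consuming test, as it appears later in
   convert_move:  */
#if 0
if (!((GET_CODE (from) == MEM
       && ! MEM_VOLATILE_P (from)
       && direct_load[(int) to_mode]
       && ! mode_dependent_address_p (XEXP (from, 0)))
      || GET_CODE (from) == REG
      || GET_CODE (from) == SUBREG))
  from = force_reg (from_mode, from);  /* can't access the MEM in TO_MODE */
#endif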
350
351 /* This is run at the start of compiling a function. */
352
353 void
354 init_expr ()
355 {
356 init_queue ();
357
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
361 saveregs_value = 0;
362 apply_args_value = 0;
363 forced_labels = 0;
364 }
365
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
368
369 void
370 save_expr_status (p)
371 struct function *p;
372 {
373 /* Instead of saving the postincrement queue, empty it. */
374 emit_queue ();
375
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
380 p->apply_args_value = apply_args_value;
381 p->forced_labels = forced_labels;
382
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
386 saveregs_value = 0;
387 apply_args_value = 0;
388 forced_labels = 0;
389 }
390
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
393
394 void
395 restore_expr_status (p)
396 struct function *p;
397 {
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
402 apply_args_value = p->apply_args_value;
403 forced_labels = p->forced_labels;
404 }
405 \f
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
408
409 static rtx pending_chain;
410
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
414
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
417
418 static rtx
419 enqueue_insn (var, body)
420 rtx var, body;
421 {
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
423 var, NULL_RTX, NULL_RTX, body, pending_chain);
424 return pending_chain;
425 }
426
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
433
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
437
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
441
442 rtx
443 protect_from_queue (x, modify)
444 register rtx x;
445 int modify;
446 {
447 register RTX_CODE code = GET_CODE (x);
448
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
452 return x;
453 #endif
454
455 if (code != QUEUED)
456 {
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
461 shared. */
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
464 {
465 register rtx y = XEXP (x, 0);
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
467
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
471
472 if (QUEUED_INSN (y))
473 {
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
476 QUEUED_INSN (y));
477 return temp;
478 }
479 return new;
480 }
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
483 if (code == MEM)
484 {
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
487 {
488 x = copy_rtx (x);
489 XEXP (x, 0) = tem;
490 }
491 }
492 else if (code == PLUS || code == MULT)
493 {
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
497 {
498 x = copy_rtx (x);
499 XEXP (x, 0) = new0;
500 XEXP (x, 1) = new1;
501 }
502 }
503 return x;
504 }
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
509 use that copy. */
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
516 QUEUED_INSN (x));
517 return QUEUED_COPY (x);
518 }
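/* Illustrative sketch, not part of the original file, with hypothetical
   operands: the queue lifecycle.  enqueue_insn is normally reached via
   expand_increment when a POSTINCREMENT_EXPR is expanded.  */
#if 0
rtx var = gen_reg_rtx (SImode);
/* Queue "var = var + 1" instead of emitting it now; Q stands for the
   pre-increment value of VAR.  */
rtx q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));
/* Q must be filtered through protect_from_queue before it may appear
   in an insn.  */
rtx safe = protect_from_queue (q, 0);
/* Later, the queued increment is finally emitted.  */
emit_queue ();
#endif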
519
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
524
525 static int
526 queued_subexp_p (x)
527 rtx x;
528 {
529 register enum rtx_code code = GET_CODE (x);
530 switch (code)
531 {
532 case QUEUED:
533 return 1;
534 case MEM:
535 return queued_subexp_p (XEXP (x, 0));
536 case MULT:
537 case PLUS:
538 case MINUS:
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
541 }
542 return 0;
543 }
544
545 /* Perform all the pending incrementations. */
546
547 void
548 emit_queue ()
549 {
550 register rtx p;
551 while (p = pending_chain)
552 {
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
555 }
556 }
557
558 static void
559 init_queue ()
560 {
561 if (pending_chain)
562 abort ();
563 }
564 \f
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
569
570 void
571 convert_move (to, from, unsignedp)
572 register rtx to, from;
573 int unsignedp;
574 {
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
579 enum insn_code code;
580 rtx libcall;
581
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
584
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
587
588 if (to_real != from_real)
589 abort ();
590
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
593 TO here. */
594
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
600
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
602 abort ();
603
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
606 {
607 emit_move_insn (to, from);
608 return;
609 }
610
611 if (to_real)
612 {
613 rtx value;
614
615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
616 {
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
619 != CODE_FOR_nothing)
620 {
621 emit_unop_insn (code, to, from, UNKNOWN);
622 return;
623 }
624 }
625
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
630 return;
631 }
632 #endif
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 {
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
709 return;
710 }
711 #endif
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
716 return;
717 }
718 #endif
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 {
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
723 return;
724 }
725 #endif
726
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 {
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 {
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
738 return;
739 }
740 #endif
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 {
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
745 return;
746 }
747 #endif
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 {
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
752 return;
753 }
754 #endif
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 {
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
759 return;
760 }
761 #endif
762
763 libcall = (rtx) 0;
764 switch (from_mode)
765 {
766 case SFmode:
767 switch (to_mode)
768 {
769 case DFmode:
770 libcall = extendsfdf2_libfunc;
771 break;
772
773 case XFmode:
774 libcall = extendsfxf2_libfunc;
775 break;
776
777 case TFmode:
778 libcall = extendsftf2_libfunc;
779 break;
780 }
781 break;
782
783 case DFmode:
784 switch (to_mode)
785 {
786 case SFmode:
787 libcall = truncdfsf2_libfunc;
788 break;
789
790 case XFmode:
791 libcall = extenddfxf2_libfunc;
792 break;
793
794 case TFmode:
795 libcall = extenddftf2_libfunc;
796 break;
797 }
798 break;
799
800 case XFmode:
801 switch (to_mode)
802 {
803 case SFmode:
804 libcall = truncxfsf2_libfunc;
805 break;
806
807 case DFmode:
808 libcall = truncxfdf2_libfunc;
809 break;
810 }
811 break;
812
813 case TFmode:
814 switch (to_mode)
815 {
816 case SFmode:
817 libcall = trunctfsf2_libfunc;
818 break;
819
820 case DFmode:
821 libcall = trunctfdf2_libfunc;
822 break;
823 }
824 break;
825 }
826
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
829 abort ();
830
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
832 1, from, from_mode);
833 emit_move_insn (to, value);
834 return;
835 }
836
837 /* Now both modes are integers. */
838
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
842 {
843 rtx insns;
844 rtx lowpart;
845 rtx fill_value;
846 rtx lowfrom;
847 int i;
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
850
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
853 != CODE_FOR_nothing)
854 {
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
862 return;
863 }
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
868 {
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
874 return;
875 }
876
877 /* No special multiword conversion insn; do it by hand. */
878 start_sequence ();
879
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
882
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
885
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
889 else
890 lowpart_mode = from_mode;
891
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
893
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
896
897 /* Compute the value to put in each remaining word. */
898 if (unsignedp)
899 fill_value = const0_rtx;
900 else
901 {
902 #ifdef HAVE_slt
903 if (HAVE_slt
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
906 {
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
908 lowpart_mode, 0, 0);
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
911 }
912 else
913 #endif
914 {
915 fill_value
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
918 NULL_RTX, 0);
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
920 }
921 }
922
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
925 {
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
928
929 if (subword == 0)
930 abort ();
931
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
934 }
935
936 insns = get_insns ();
937 end_sequence ();
938
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
941 return;
942 }
943
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
947 {
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
956 return;
957 }
958
959 /* Handle pointer conversion.  */ /* SPEE 900220 */
960 if (to_mode == PSImode)
961 {
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
964
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
967 {
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
969 return;
970 }
971 #endif /* HAVE_truncsipsi2 */
972 abort ();
973 }
974
975 if (from_mode == PSImode)
976 {
977 if (to_mode != SImode)
978 {
979 from = convert_to_mode (SImode, from, unsignedp);
980 from_mode = SImode;
981 }
982 else
983 {
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
986 {
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
988 return;
989 }
990 #endif /* HAVE_extendpsisi2 */
991 abort ();
992 }
993 }
994
995 if (to_mode == PDImode)
996 {
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
999
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1002 {
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1004 return;
1005 }
1006 #endif /* HAVE_truncdipdi2 */
1007 abort ();
1008 }
1009
1010 if (from_mode == PDImode)
1011 {
1012 if (to_mode != DImode)
1013 {
1014 from = convert_to_mode (DImode, from, unsignedp);
1015 from_mode = DImode;
1016 }
1017 else
1018 {
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1021 {
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1023 return;
1024 }
1025 #endif /* HAVE_extendpdidi2 */
1026 abort ();
1027 }
1028 }
1029
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1032
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036 GET_MODE_BITSIZE (from_mode)))
1037 {
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1049 return;
1050 }
1051
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054 {
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1058 {
1059 emit_unop_insn (code, to, from, equiv_code);
1060 return;
1061 }
1062 else
1063 {
1064 enum machine_mode intermediate;
1065
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1075 {
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1078 return;
1079 }
1080
1081 /* No suitable intermediate mode. */
1082 abort ();
1083 }
1084 }
1085
1086 /* Support special truncate insns for certain modes. */
1087
1088 if (from_mode == DImode && to_mode == SImode)
1089 {
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1092 {
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1094 return;
1095 }
1096 #endif
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1098 return;
1099 }
1100
1101 if (from_mode == DImode && to_mode == HImode)
1102 {
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1105 {
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1107 return;
1108 }
1109 #endif
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1111 return;
1112 }
1113
1114 if (from_mode == DImode && to_mode == QImode)
1115 {
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1118 {
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1120 return;
1121 }
1122 #endif
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1124 return;
1125 }
1126
1127 if (from_mode == SImode && to_mode == HImode)
1128 {
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1131 {
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1133 return;
1134 }
1135 #endif
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1137 return;
1138 }
1139
1140 if (from_mode == SImode && to_mode == QImode)
1141 {
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1144 {
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1146 return;
1147 }
1148 #endif
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1150 return;
1151 }
1152
1153 if (from_mode == HImode && to_mode == QImode)
1154 {
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1157 {
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1159 return;
1160 }
1161 #endif
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1163 return;
1164 }
1165
1166 if (from_mode == TImode && to_mode == DImode)
1167 {
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1170 {
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1172 return;
1173 }
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1177 }
1178
1179 if (from_mode == TImode && to_mode == SImode)
1180 {
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1183 {
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1185 return;
1186 }
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1190 }
1191
1192 if (from_mode == TImode && to_mode == HImode)
1193 {
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1196 {
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1198 return;
1199 }
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1203 }
1204
1205 if (from_mode == TImode && to_mode == QImode)
1206 {
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1209 {
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1211 return;
1212 }
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1216 }
1217
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1222 {
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1225 return;
1226 }
1227
1228 /* Mode combination is not recognized. */
1229 abort ();
1230 }
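/* Worked trace, not part of the original file: converting a DImode
   register to QImode on a 32-bit-word host first hits the "truncating
   multi-word" case above (DImode is wider than a word, QImode is not),
   which recurses on the low word via gen_lowpart (word_mode, from); the
   resulting SImode-to-QImode step then uses gen_lowpart directly when
   TRULY_NOOP_TRUNCATION allows it, or truncsiqi2 where the target
   provides that insn.  */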
1231
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1238
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1241
1242 rtx
1243 convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1245 rtx x;
1246 int unsignedp;
1247 {
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1249 }
1250
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1255
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1258
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1260
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1263
1264 rtx
1265 convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1267 rtx x;
1268 int unsignedp;
1269 {
1270 register rtx temp;
1271
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1274
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
1279
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1282
1283 if (mode == oldmode)
1284 return x;
1285
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong thing if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1291
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1295 {
1296 HOST_WIDE_INT val = INTVAL (x);
1297
1298 if (oldmode != VOIDmode
1299 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1300 {
1301 int width = GET_MODE_BITSIZE (oldmode);
1302
1303 /* We need to zero extend VAL. */
1304 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1305 }
1306
1307 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1308 }
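/* Worked example, not part of the original file: with a 32-bit
   HOST_WIDE_INT, converting the CONST_INT -1 from HImode as an unsigned
   value computes val &= (1 << 16) - 1, i.e. 0xffff, and
   immed_double_const (0xffff, 0, mode) then builds a double-word
   constant whose high word is zero rather than all ones.  */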
1309
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1314
1315 if ((GET_CODE (x) == CONST_INT
1316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1317 || (GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_CLASS (oldmode) == MODE_INT
1319 && (GET_CODE (x) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1321 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1322 && direct_load[(int) mode])
1323 || (GET_CODE (x) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1325 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1326 {
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1331 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1332 {
1333 HOST_WIDE_INT val = INTVAL (x);
1334 int width = GET_MODE_BITSIZE (oldmode);
1335
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1339 if (! unsignedp
1340 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1341 val |= (HOST_WIDE_INT) (-1) << width;
1342
1343 return GEN_INT (val);
1344 }
1345
1346 return gen_lowpart (mode, x);
1347 }
1348
1349 temp = gen_reg_rtx (mode);
1350 convert_move (temp, x, unsignedp);
1351 return temp;
1352 }
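/* Illustrative sketch, not part of the original file, with hypothetical
   operands:  */
#if 0
rtx byte = gen_reg_rtx (QImode);
/* Widen to SImode; UNSIGNEDP == 1 requests zero-extension.  */
rtx word = convert_to_mode (SImode, byte, 1);
#endif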
1353 \f
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1359
1360 static void
1361 move_by_pieces (to, from, len, align)
1362 rtx to, from;
1363 int len, align;
1364 {
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1367 int max_size = MOVE_MAX + 1;
1368
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1381
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1388
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data.autinc_from && data.autinc_to)
1396 && move_by_pieces_ninsns (len, align) > 2)
1397 {
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data.reverse && ! data.autinc_from)
1400 {
1401 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1402 data.autinc_from = 1;
1403 data.explicit_inc_from = -1;
1404 }
1405 #endif
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data.autinc_from)
1408 {
1409 data.from_addr = copy_addr_to_reg (from_addr);
1410 data.autinc_from = 1;
1411 data.explicit_inc_from = 1;
1412 }
1413 #endif
1414 if (!data.autinc_from && CONSTANT_P (from_addr))
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data.reverse && ! data.autinc_to)
1418 {
1419 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1420 data.autinc_to = 1;
1421 data.explicit_inc_to = -1;
1422 }
1423 #endif
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data.reverse && ! data.autinc_to)
1426 {
1427 data.to_addr = copy_addr_to_reg (to_addr);
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = 1;
1430 }
1431 #endif
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 }
1435
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1439
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1442
1443 while (max_size > 1)
1444 {
1445 enum machine_mode mode = VOIDmode, tmode;
1446 enum insn_code icode;
1447
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
1451 mode = tmode;
1452
1453 if (mode == VOIDmode)
1454 break;
1455
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1461
1462 max_size = GET_MODE_SIZE (mode);
1463 }
1464
1465 /* The code above should have handled everything. */
1466 if (data.len > 0)
1467 abort ();
1468 }
1469
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
1472
1473 static int
1474 move_by_pieces_ninsns (l, align)
1475 unsigned int l;
1476 int align;
1477 {
1478 register int n_insns = 0;
1479 int max_size = MOVE_MAX + 1;
1480
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1483 align = MOVE_MAX;
1484
1485 while (max_size > 1)
1486 {
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1489
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1493 mode = tmode;
1494
1495 if (mode == VOIDmode)
1496 break;
1497
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1501 GET_MODE_SIZE (mode)))
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1503
1504 max_size = GET_MODE_SIZE (mode);
1505 }
1506
1507 return n_insns;
1508 }
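/* Worked count, not part of the original file: assuming 32-bit words,
   full alignment, and L == 7, the loop above charges one SImode move
   (n_insns = 1, l = 3), then one HImode move (n_insns = 2, l = 1), then
   one QImode move (n_insns = 3, l = 0).  */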
1509
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1513
1514 static void
1515 move_by_pieces_1 (genfun, mode, data)
1516 rtx (*genfun) ();
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1519 {
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1522
1523 while (data->len >= size)
1524 {
1525 if (data->reverse) data->offset -= size;
1526
1527 to1 = (data->autinc_to
1528 ? gen_rtx (MEM, mode, data->to_addr)
1529 : copy_rtx (change_address (data->to, mode,
1530 plus_constant (data->to_addr,
1531 data->offset))));
1532 MEM_IN_STRUCT_P (to1) = data->to_struct;
1533
1534 from1 =
1535 (data->autinc_from
1536 ? gen_rtx (MEM, mode, data->from_addr)
1537 : copy_rtx (change_address (data->from, mode,
1538 plus_constant (data->from_addr,
1539 data->offset))));
1540 MEM_IN_STRUCT_P (from1) = data->from_struct;
1541
1542 #ifdef HAVE_PRE_DECREMENT
1543 if (data->explicit_inc_to < 0)
1544 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1545 if (data->explicit_inc_from < 0)
1546 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1547 #endif
1548
1549 emit_insn ((*genfun) (to1, from1));
1550 #ifdef HAVE_POST_INCREMENT
1551 if (data->explicit_inc_to > 0)
1552 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1553 if (data->explicit_inc_from > 0)
1554 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1555 #endif
1556
1557 if (! data->reverse) data->offset += size;
1558
1559 data->len -= size;
1560 }
1561 }
1562 \f
1563 /* Emit code to move a block Y to a block X.
1564 This may be done with string-move instructions,
1565 with multiple scalar move instructions, or with a library call.
1566
1567 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1568 with mode BLKmode.
1569 SIZE is an rtx that says how long they are.
1570 ALIGN is the maximum alignment we can assume they have,
1571 measured in bytes. */
1572
1573 void
1574 emit_block_move (x, y, size, align)
1575 rtx x, y;
1576 rtx size;
1577 int align;
1578 {
1579 if (GET_MODE (x) != BLKmode)
1580 abort ();
1581
1582 if (GET_MODE (y) != BLKmode)
1583 abort ();
1584
1585 x = protect_from_queue (x, 1);
1586 y = protect_from_queue (y, 0);
1587 size = protect_from_queue (size, 0);
1588
1589 if (GET_CODE (x) != MEM)
1590 abort ();
1591 if (GET_CODE (y) != MEM)
1592 abort ();
1593 if (size == 0)
1594 abort ();
1595
1596 if (GET_CODE (size) == CONST_INT
1597 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1598 move_by_pieces (x, y, INTVAL (size), align);
1599 else
1600 {
1601 /* Try the most limited insn first, because there's no point
1602 including more than one in the machine description unless
1603 the more limited one has some advantage. */
1604
1605 rtx opalign = GEN_INT (align);
1606 enum machine_mode mode;
1607
1608 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1609 mode = GET_MODE_WIDER_MODE (mode))
1610 {
1611 enum insn_code code = movstr_optab[(int) mode];
1612
1613 if (code != CODE_FOR_nothing
1614 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1615 here because if SIZE is less than the mode mask, as it is
1616 returned by the macro, it will definitely be less than the
1617 actual mode mask. */
1618 && ((GET_CODE (size) == CONST_INT
1619 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1620 <= GET_MODE_MASK (mode)))
1621 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1622 && (insn_operand_predicate[(int) code][0] == 0
1623 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1624 && (insn_operand_predicate[(int) code][1] == 0
1625 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1626 && (insn_operand_predicate[(int) code][3] == 0
1627 || (*insn_operand_predicate[(int) code][3]) (opalign,
1628 VOIDmode)))
1629 {
1630 rtx op2;
1631 rtx last = get_last_insn ();
1632 rtx pat;
1633
1634 op2 = convert_to_mode (mode, size, 1);
1635 if (insn_operand_predicate[(int) code][2] != 0
1636 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1637 op2 = copy_to_mode_reg (mode, op2);
1638
1639 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1640 if (pat)
1641 {
1642 emit_insn (pat);
1643 return;
1644 }
1645 else
1646 delete_insns_since (last);
1647 }
1648 }
1649
1650 #ifdef TARGET_MEM_FUNCTIONS
1651 emit_library_call (memcpy_libfunc, 0,
1652 VOIDmode, 3, XEXP (x, 0), Pmode,
1653 XEXP (y, 0), Pmode,
1654 convert_to_mode (TYPE_MODE (sizetype), size,
1655 TREE_UNSIGNED (sizetype)),
1656 TYPE_MODE (sizetype));
1657 #else
1658 emit_library_call (bcopy_libfunc, 0,
1659 VOIDmode, 3, XEXP (y, 0), Pmode,
1660 XEXP (x, 0), Pmode,
1661 convert_to_mode (TYPE_MODE (integer_type_node), size,
1662 TREE_UNSIGNED (integer_type_node)),
1663 TYPE_MODE (integer_type_node));
1664 #endif
1665 }
1666 }
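/* Illustrative sketch, not part of the original file, with hypothetical
   operands.  X and Y must be BLKmode MEMs:  */
#if 0
/* Copy 16 bytes assuming at most 4-byte alignment.  With the default
   MOVE_RATIO this becomes four SImode moves; otherwise a movstr insn
   or a memcpy/bcopy library call is used.  */
emit_block_move (x, y, GEN_INT (16), 4);
#endif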
1667 \f
1668 /* Copy all or part of a value X into registers starting at REGNO.
1669 The number of registers to be filled is NREGS. */
1670
1671 void
1672 move_block_to_reg (regno, x, nregs, mode)
1673 int regno;
1674 rtx x;
1675 int nregs;
1676 enum machine_mode mode;
1677 {
1678 int i;
1679 rtx pat, last;
1680
1681 if (nregs == 0)
1682 return;
1683
1684 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1685 x = validize_mem (force_const_mem (mode, x));
1686
1687 /* See if the machine can do this with a load multiple insn. */
1688 #ifdef HAVE_load_multiple
1689 if (HAVE_load_multiple)
1690 {
1691 last = get_last_insn ();
1692 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1693 GEN_INT (nregs));
1694 if (pat)
1695 {
1696 emit_insn (pat);
1697 return;
1698 }
1699 else
1700 delete_insns_since (last);
1701 }
1702 #endif
1703
1704 for (i = 0; i < nregs; i++)
1705 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1706 operand_subword_force (x, i, mode));
1707 }
1708
1709 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1710 The number of registers to be filled is NREGS. SIZE indicates the number
1711 of bytes in the object X. */
1712
1713
1714 void
1715 move_block_from_reg (regno, x, nregs, size)
1716 int regno;
1717 rtx x;
1718 int nregs;
1719 int size;
1720 {
1721 int i;
1722 rtx pat, last;
1723 enum machine_mode mode;
1724
1725 /* If SIZE is that of a mode no bigger than a word, just use that
1726 mode's store operation. */
1727 if (size <= UNITS_PER_WORD
1728 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1729 {
1730 emit_move_insn (change_address (x, mode, NULL),
1731 gen_rtx (REG, mode, regno));
1732 return;
1733 }
1734
1735 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1736 to the left before storing to memory. Note that the previous test
1737 doesn't handle all cases (e.g. SIZE == 3). */
1738 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1739 {
1740 rtx tem = operand_subword (x, 0, 1, BLKmode);
1741 rtx shift;
1742
1743 if (tem == 0)
1744 abort ();
1745
1746 shift = expand_shift (LSHIFT_EXPR, word_mode,
1747 gen_rtx (REG, word_mode, regno),
1748 build_int_2 ((UNITS_PER_WORD - size)
1749 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1750 emit_move_insn (tem, shift);
1751 return;
1752 }
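/* Worked example, not part of the original file: with 4-byte words and
   SIZE == 3 on a BYTES_BIG_ENDIAN target, the code above shifts the
   register left by (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT == 8 bits,
   so the three significant bytes land in the first three bytes of the
   memory block.  */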
1753
1754 /* See if the machine can do this with a store multiple insn. */
1755 #ifdef HAVE_store_multiple
1756 if (HAVE_store_multiple)
1757 {
1758 last = get_last_insn ();
1759 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1760 GEN_INT (nregs));
1761 if (pat)
1762 {
1763 emit_insn (pat);
1764 return;
1765 }
1766 else
1767 delete_insns_since (last);
1768 }
1769 #endif
1770
1771 for (i = 0; i < nregs; i++)
1772 {
1773 rtx tem = operand_subword (x, i, 1, BLKmode);
1774
1775 if (tem == 0)
1776 abort ();
1777
1778 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1779 }
1780 }
1781
1782 /* Emit code to move a block Y to a block X, where X is non-consecutive
1783 registers represented by a PARALLEL. */
1784
1785 void
1786 emit_group_load (x, y)
1787 rtx x, y;
1788 {
1789 rtx target_reg, source;
1790 int i;
1791
1792 if (GET_CODE (x) != PARALLEL)
1793 abort ();
1794
1795 /* Check for a NULL entry, used to indicate that the parameter goes
1796 both on the stack and in registers. */
1797 if (XEXP (XVECEXP (x, 0, 0), 0))
1798 i = 0;
1799 else
1800 i = 1;
1801
1802 for (; i < XVECLEN (x, 0); i++)
1803 {
1804 rtx element = XVECEXP (x, 0, i);
1805
1806 target_reg = XEXP (element, 0);
1807
1808 if (GET_CODE (y) == MEM)
1809 source = change_address (y, GET_MODE (target_reg),
1810 plus_constant (XEXP (y, 0),
1811 INTVAL (XEXP (element, 1))));
1812 else if (XEXP (element, 1) == const0_rtx)
1813 {
1814 if (GET_MODE (target_reg) == GET_MODE (y))
1815 source = y;
1816 /* Allow target_reg to be smaller than the input register, to handle
1817 AIX passing 4 DF arguments after a single SI arg. The last DF
1818 argument loads only one word into the integer registers, but loads
1819 a full DF value into the float registers. */
1820 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1821 <= GET_MODE_SIZE (GET_MODE (y)))
1822 && GET_MODE (target_reg) == word_mode)
1823 /* This might be a const_double, so we can't just use SUBREG. */
1824 source = operand_subword (y, 0, 0, VOIDmode);
1825 else
1826 abort ();
1827 }
1828 else
1829 abort ();
1830
1831 emit_move_insn (target_reg, source);
1832 }
1833 }
1834
1835 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1836 registers represented by a PARALLEL. */
1837
1838 void
1839 emit_group_store (x, y)
1840 rtx x, y;
1841 {
1842 rtx source_reg, target;
1843 int i;
1844
1845 if (GET_CODE (y) != PARALLEL)
1846 abort ();
1847
1848 /* Check for a NULL entry, used to indicate that the parameter goes
1849 both on the stack and in registers. */
1850 if (XEXP (XVECEXP (y, 0, 0), 0))
1851 i = 0;
1852 else
1853 i = 1;
1854
1855 for (; i < XVECLEN (y, 0); i++)
1856 {
1857 rtx element = XVECEXP (y, 0, i);
1858
1859 source_reg = XEXP (element, 0);
1860
1861 if (GET_CODE (x) == MEM)
1862 target = change_address (x, GET_MODE (source_reg),
1863 plus_constant (XEXP (x, 0),
1864 INTVAL (XEXP (element, 1))));
1865 else if (XEXP (element, 1) == const0_rtx)
1866 {
1867 target = x;
1868 if (GET_MODE (target) != GET_MODE (source_reg))
1869 target = gen_lowpart (GET_MODE (source_reg), target);
1870 }
1871 else
1872 abort ();
1873
1874 emit_move_insn (target, source_reg);
1875 }
1876 }
1877
1878 /* Add a USE expression for REG to the (possibly empty) list pointed
1879 to by CALL_FUSAGE. REG must denote a hard register. */
1880
1881 void
1882 use_reg (call_fusage, reg)
1883 rtx *call_fusage, reg;
1884 {
1885 if (GET_CODE (reg) != REG
1886 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1887 abort();
1888
1889 *call_fusage
1890 = gen_rtx (EXPR_LIST, VOIDmode,
1891 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1892 }
1893
1894 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1895 starting at REGNO. All of these registers must be hard registers. */
1896
1897 void
1898 use_regs (call_fusage, regno, nregs)
1899 rtx *call_fusage;
1900 int regno;
1901 int nregs;
1902 {
1903 int i;
1904
1905 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1906 abort ();
1907
1908 for (i = 0; i < nregs; i++)
1909 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1910 }
1911
1912 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1913 PARALLEL REGS. This is for calls that pass values in multiple
1914 non-contiguous locations. The Irix 6 ABI has examples of this. */
1915
1916 void
1917 use_group_regs (call_fusage, regs)
1918 rtx *call_fusage;
1919 rtx regs;
1920 {
1921 int i;
1922
1923 /* Check for a NULL entry, used to indicate that the parameter goes
1924 both on the stack and in registers. */
1925 if (XEXP (XVECEXP (regs, 0, 0), 0))
1926 i = 0;
1927 else
1928 i = 1;
1929
1930 for (; i < XVECLEN (regs, 0); i++)
1931 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1932 }
1933 \f
1934 /* Generate several move instructions to clear LEN bytes of block TO.
1935 (A MEM rtx with BLKmode). The caller must pass TO through
1936 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1937 we can assume. */
1938
1939 static void
1940 clear_by_pieces (to, len, align)
1941 rtx to;
1942 int len, align;
1943 {
1944 struct clear_by_pieces data;
1945 rtx to_addr = XEXP (to, 0);
1946 int max_size = MOVE_MAX + 1;
1947
1948 data.offset = 0;
1949 data.to_addr = to_addr;
1950 data.to = to;
1951 data.autinc_to
1952 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1953 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1954
1955 data.explicit_inc_to = 0;
1956 data.reverse
1957 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1958 if (data.reverse) data.offset = len;
1959 data.len = len;
1960
1961 data.to_struct = MEM_IN_STRUCT_P (to);
1962
1963 /* If copying requires more than two move insns,
1964 copy addresses to registers (to make displacements shorter)
1965 and use post-increment if available. */
1966 if (!data.autinc_to
1967 && move_by_pieces_ninsns (len, align) > 2)
1968 {
1969 #ifdef HAVE_PRE_DECREMENT
1970 if (data.reverse && ! data.autinc_to)
1971 {
1972 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1973 data.autinc_to = 1;
1974 data.explicit_inc_to = -1;
1975 }
1976 #endif
1977 #ifdef HAVE_POST_INCREMENT
1978 if (! data.reverse && ! data.autinc_to)
1979 {
1980 data.to_addr = copy_addr_to_reg (to_addr);
1981 data.autinc_to = 1;
1982 data.explicit_inc_to = 1;
1983 }
1984 #endif
1985 if (!data.autinc_to && CONSTANT_P (to_addr))
1986 data.to_addr = copy_addr_to_reg (to_addr);
1987 }
1988
1989 if (! SLOW_UNALIGNED_ACCESS
1990 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1991 align = MOVE_MAX;
1992
1993 /* First move what we can in the largest integer mode, then go to
1994 successively smaller modes. */
1995
1996 while (max_size > 1)
1997 {
1998 enum machine_mode mode = VOIDmode, tmode;
1999 enum insn_code icode;
2000
2001 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2002 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2003 if (GET_MODE_SIZE (tmode) < max_size)
2004 mode = tmode;
2005
2006 if (mode == VOIDmode)
2007 break;
2008
2009 icode = mov_optab->handlers[(int) mode].insn_code;
2010 if (icode != CODE_FOR_nothing
2011 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2012 GET_MODE_SIZE (mode)))
2013 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2014
2015 max_size = GET_MODE_SIZE (mode);
2016 }
2017
2018 /* The code above should have handled everything. */
2019 if (data.len != 0)
2020 abort ();
2021 }
2022
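/* A worked example (assuming a 32-bit target with MOVE_MAX == 4,
   word-aligned TO, and SLOW_UNALIGNED_ACCESS false): clearing
   LEN == 7 bytes emits one SImode store, then one HImode store, then
   one QImode store, since each pass of the loop above picks the widest
   integer mode still narrower than MAX_SIZE.  */
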
2023 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2024 with move instructions for mode MODE. GENFUN is the gen_... function
2025 to make a move insn for that mode. DATA has all the other info. */
2026
2027 static void
2028 clear_by_pieces_1 (genfun, mode, data)
2029 rtx (*genfun) ();
2030 enum machine_mode mode;
2031 struct clear_by_pieces *data;
2032 {
2033 register int size = GET_MODE_SIZE (mode);
2034 register rtx to1;
2035
2036 while (data->len >= size)
2037 {
2038 if (data->reverse) data->offset -= size;
2039
2040 to1 = (data->autinc_to
2041 ? gen_rtx (MEM, mode, data->to_addr)
2042 : copy_rtx (change_address (data->to, mode,
2043 plus_constant (data->to_addr,
2044 data->offset))));
2045 MEM_IN_STRUCT_P (to1) = data->to_struct;
2046
2047 #ifdef HAVE_PRE_DECREMENT
2048 if (data->explicit_inc_to < 0)
2049 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2050 #endif
2051
2052 emit_insn ((*genfun) (to1, const0_rtx));
2053 #ifdef HAVE_POST_INCREMENT
2054 if (data->explicit_inc_to > 0)
2055 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2056 #endif
2057
2058 if (! data->reverse) data->offset += size;
2059
2060 data->len -= size;
2061 }
2062 }
2063 \f
2064 /* Write zeros through the storage of OBJECT.
2065 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2066 the maximum alignment we can assume it has, measured in bytes. */
2067
2068 void
2069 clear_storage (object, size, align)
2070 rtx object;
2071 rtx size;
2072 int align;
2073 {
2074 if (GET_MODE (object) == BLKmode)
2075 {
2076 object = protect_from_queue (object, 1);
2077 size = protect_from_queue (size, 0);
2078
2079 if (GET_CODE (size) == CONST_INT
2080 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2081 clear_by_pieces (object, INTVAL (size), align);
2082
2083 else
2084 {
2085 /* Try the most limited insn first, because there's no point
2086 including more than one in the machine description unless
2087 the more limited one has some advantage. */
2088
2089 rtx opalign = GEN_INT (align);
2090 enum machine_mode mode;
2091
2092 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2093 mode = GET_MODE_WIDER_MODE (mode))
2094 {
2095 enum insn_code code = clrstr_optab[(int) mode];
2096
2097 if (code != CODE_FOR_nothing
2098 /* We don't need MODE to be narrower than
2099 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2100 the mode mask, as it is returned by the macro, it will
2101 definitely be less than the actual mode mask. */
2102 && ((GET_CODE (size) == CONST_INT
2103 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2104 <= GET_MODE_MASK (mode)))
2105 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2106 && (insn_operand_predicate[(int) code][0] == 0
2107 || (*insn_operand_predicate[(int) code][0]) (object,
2108 BLKmode))
2109 && (insn_operand_predicate[(int) code][2] == 0
2110 || (*insn_operand_predicate[(int) code][2]) (opalign,
2111 VOIDmode)))
2112 {
2113 rtx op1;
2114 rtx last = get_last_insn ();
2115 rtx pat;
2116
2117 op1 = convert_to_mode (mode, size, 1);
2118 if (insn_operand_predicate[(int) code][1] != 0
2119 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2120 mode))
2121 op1 = copy_to_mode_reg (mode, op1);
2122
2123 pat = GEN_FCN ((int) code) (object, op1, opalign);
2124 if (pat)
2125 {
2126 emit_insn (pat);
2127 return;
2128 }
2129 else
2130 delete_insns_since (last);
2131 }
2132 }
2133
2134
2135 #ifdef TARGET_MEM_FUNCTIONS
2136 emit_library_call (memset_libfunc, 0,
2137 VOIDmode, 3,
2138 XEXP (object, 0), Pmode,
2139 const0_rtx, TYPE_MODE (integer_type_node),
2140 convert_to_mode (TYPE_MODE (sizetype),
2141 size, TREE_UNSIGNED (sizetype)),
2142 TYPE_MODE (sizetype));
2143 #else
2144 emit_library_call (bzero_libfunc, 0,
2145 VOIDmode, 2,
2146 XEXP (object, 0), Pmode,
2147 convert_to_mode (TYPE_MODE (integer_type_node),
2148 size,
2149 TREE_UNSIGNED (integer_type_node)),
2150 TYPE_MODE (integer_type_node));
2151 #endif
2152 }
2153 }
2154 else
2155 emit_move_insn (object, const0_rtx);
2156 }
2157
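/* A minimal usage sketch (hypothetical, not part of the build),
   zeroing a 64-byte BLKmode stack temporary with 4-byte alignment.  */
#if 0
{
  rtx blk = assign_stack_temp (BLKmode, 64, 0);
  clear_storage (blk, GEN_INT (64), 4);	/* ALIGN is in bytes.  */
}
#endif
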
2158 /* Generate code to copy Y into X.
2159 Both Y and X must have the same mode, except that
2160 Y can be a constant with VOIDmode.
2161 This mode cannot be BLKmode; use emit_block_move for that.
2162
2163 Return the last instruction emitted. */
2164
2165 rtx
2166 emit_move_insn (x, y)
2167 rtx x, y;
2168 {
2169 enum machine_mode mode = GET_MODE (x);
2170
2171 x = protect_from_queue (x, 1);
2172 y = protect_from_queue (y, 0);
2173
2174 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2175 abort ();
2176
2177 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2178 y = force_const_mem (mode, y);
2179
2180 /* If X or Y are memory references, verify that their addresses are valid
2181 for the machine. */
2182 if (GET_CODE (x) == MEM
2183 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2184 && ! push_operand (x, GET_MODE (x)))
2185 || (flag_force_addr
2186 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2187 x = change_address (x, VOIDmode, XEXP (x, 0));
2188
2189 if (GET_CODE (y) == MEM
2190 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2191 || (flag_force_addr
2192 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2193 y = change_address (y, VOIDmode, XEXP (y, 0));
2194
2195 if (mode == BLKmode)
2196 abort ();
2197
2198 return emit_move_insn_1 (x, y);
2199 }
2200
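/* A minimal usage sketch (hypothetical, not part of the build):
   loading an immediate into a fresh SImode pseudo register.  */
#if 0
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));	/* Y may be a VOIDmode constant.  */
}
#endif
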
2201 /* Low level part of emit_move_insn.
2202 Called just like emit_move_insn, but assumes X and Y
2203 are basically valid. */
2204
2205 rtx
2206 emit_move_insn_1 (x, y)
2207 rtx x, y;
2208 {
2209 enum machine_mode mode = GET_MODE (x);
2210 enum machine_mode submode;
2211 enum mode_class class = GET_MODE_CLASS (mode);
2212 int i;
2213
2214 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2215 return
2216 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2217
2218 /* Expand complex moves by moving real part and imag part, if possible. */
2219 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2220 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2221 * BITS_PER_UNIT),
2222 (class == MODE_COMPLEX_INT
2223 ? MODE_INT : MODE_FLOAT),
2224 0))
2225 && (mov_optab->handlers[(int) submode].insn_code
2226 != CODE_FOR_nothing))
2227 {
2228 /* Don't split destination if it is a stack push. */
2229 int stack = push_operand (x, GET_MODE (x));
2230 rtx insns;
2231
2232 /* If this is a stack push, push the highpart first, so it
2233 will be in the argument order.
2234
2235 In that case, change_address is used only to convert
2236 the mode, not to change the address. */
2237 if (stack)
2238 {
2239 /* Note that the real part always precedes the imag part in memory
2240 regardless of machine's endianness. */
2241 #ifdef STACK_GROWS_DOWNWARD
2242 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2243 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2244 gen_imagpart (submode, y)));
2245 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2246 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2247 gen_realpart (submode, y)));
2248 #else
2249 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2250 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2251 gen_realpart (submode, y)));
2252 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2253 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2254 gen_imagpart (submode, y)));
2255 #endif
2256 }
2257 else
2258 {
2259 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2260 (gen_realpart (submode, x), gen_realpart (submode, y)));
2261 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2262 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2263 }
2264
2265 return get_last_insn ();
2266 }
2267
2268 /* This will handle any multi-word mode that lacks a move_insn pattern.
2269 However, you will get better code if you define such patterns,
2270 even if they must turn into multiple assembler instructions. */
2271 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2272 {
2273 rtx last_insn = 0;
2274 rtx insns;
2275
2276 #ifdef PUSH_ROUNDING
2277
2278 /* If X is a push on the stack, do the push now and replace
2279 X with a reference to the stack pointer. */
2280 if (push_operand (x, GET_MODE (x)))
2281 {
2282 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2283 x = change_address (x, VOIDmode, stack_pointer_rtx);
2284 }
2285 #endif
2286
2287 /* Show the output dies here. */
2288 if (x != y)
2289 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2290
2291 for (i = 0;
2292 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2293 i++)
2294 {
2295 rtx xpart = operand_subword (x, i, 1, mode);
2296 rtx ypart = operand_subword (y, i, 1, mode);
2297
2298 /* If we can't get a part of Y, put Y into memory if it is a
2299 constant. Otherwise, force it into a register. If we still
2300 can't get a part of Y, abort. */
2301 if (ypart == 0 && CONSTANT_P (y))
2302 {
2303 y = force_const_mem (mode, y);
2304 ypart = operand_subword (y, i, 1, mode);
2305 }
2306 else if (ypart == 0)
2307 ypart = operand_subword_force (y, i, mode);
2308
2309 if (xpart == 0 || ypart == 0)
2310 abort ();
2311
2312 last_insn = emit_move_insn (xpart, ypart);
2313 }
2314
2315 return last_insn;
2316 }
2317 else
2318 abort ();
2319 }
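
/* For example (assuming 32-bit words and no movdi pattern in the
   machine description), a DImode move falls into the multi-word case
   above: it typically emits (clobber (reg:DI ...)) to show the output
   dies, then two SImode word moves built with operand_subword.  */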
2320 \f
2321 /* Pushing data onto the stack. */
2322
2323 /* Push a block of length SIZE (perhaps variable)
2324 and return an rtx to address the beginning of the block.
2325 Note that it is not possible for the value returned to be a QUEUED.
2326 The value may be virtual_outgoing_args_rtx.
2327
2328 EXTRA is the number of bytes of padding to push in addition to SIZE.
2329 BELOW nonzero means this padding comes at low addresses;
2330 otherwise, the padding comes at high addresses. */
2331
2332 rtx
2333 push_block (size, extra, below)
2334 rtx size;
2335 int extra, below;
2336 {
2337 register rtx temp;
2338
2339 size = convert_modes (Pmode, ptr_mode, size, 1);
2340 if (CONSTANT_P (size))
2341 anti_adjust_stack (plus_constant (size, extra));
2342 else if (GET_CODE (size) == REG && extra == 0)
2343 anti_adjust_stack (size);
2344 else
2345 {
2346 rtx temp = copy_to_mode_reg (Pmode, size);
2347 if (extra != 0)
2348 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2349 temp, 0, OPTAB_LIB_WIDEN);
2350 anti_adjust_stack (temp);
2351 }
2352
2353 #ifdef STACK_GROWS_DOWNWARD
2354 temp = virtual_outgoing_args_rtx;
2355 if (extra != 0 && below)
2356 temp = plus_constant (temp, extra);
2357 #else
2358 if (GET_CODE (size) == CONST_INT)
2359 temp = plus_constant (virtual_outgoing_args_rtx,
2360 - INTVAL (size) - (below ? 0 : extra));
2361 else if (extra != 0 && !below)
2362 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2363 negate_rtx (Pmode, plus_constant (size, extra)));
2364 else
2365 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2366 negate_rtx (Pmode, size));
2367 #endif
2368
2369 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2370 }
2371
2372 rtx
2373 gen_push_operand ()
2374 {
2375 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2376 }
2377
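/* E.g. on a machine where the stack grows downward, the result is
   (pre_dec (reg sp)), so (mem:MODE (pre_dec (reg sp))) used as a move
   destination pushes a MODE-sized object.  */
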
2378 /* Generate code to push X onto the stack, assuming it has mode MODE and
2379 type TYPE.
2380 MODE is redundant except when X is a CONST_INT (since they don't
2381 carry mode info).
2382 SIZE is an rtx for the size of data to be copied (in bytes),
2383 needed only if X is BLKmode.
2384
2385 ALIGN (in bytes) is maximum alignment we can assume.
2386
2387 If PARTIAL and REG are both nonzero, then copy that many of the first
2388 words of X into registers starting with REG, and push the rest of X.
2389 The amount of space pushed is decreased by PARTIAL words,
2390 rounded *down* to a multiple of PARM_BOUNDARY.
2391 REG must be a hard register in this case.
2392 If REG is zero but PARTIAL is not, take all other actions for an
2393 argument partially in registers, but do not actually load any
2394 registers.
2395
2396 EXTRA is the amount in bytes of extra space to leave next to this arg.
2397 This is ignored if an argument block has already been allocated.
2398
2399 On a machine that lacks real push insns, ARGS_ADDR is the address of
2400 the bottom of the argument block for this call. We use indexing off there
2401 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2402 argument block has not been preallocated.
2403
2404 ARGS_SO_FAR is the size of args previously pushed for this call. */
2405
2406 void
2407 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2408 args_addr, args_so_far)
2409 register rtx x;
2410 enum machine_mode mode;
2411 tree type;
2412 rtx size;
2413 int align;
2414 int partial;
2415 rtx reg;
2416 int extra;
2417 rtx args_addr;
2418 rtx args_so_far;
2419 {
2420 rtx xinner;
2421 enum direction stack_direction
2422 #ifdef STACK_GROWS_DOWNWARD
2423 = downward;
2424 #else
2425 = upward;
2426 #endif
2427
2428 /* Decide where to pad the argument: `downward' for below,
2429 `upward' for above, or `none' for don't pad it.
2430 Default is below for small data on big-endian machines; else above. */
2431 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2432
2433 /* Invert direction if stack is post-update. */
2434 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2435 if (where_pad != none)
2436 where_pad = (where_pad == downward ? upward : downward);
2437
2438 xinner = x = protect_from_queue (x, 0);
2439
2440 if (mode == BLKmode)
2441 {
2442 /* Copy a block into the stack, entirely or partially. */
2443
2444 register rtx temp;
2445 int used = partial * UNITS_PER_WORD;
2446 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2447 int skip;
2448
2449 if (size == 0)
2450 abort ();
2451
2452 used -= offset;
2453
2454 /* USED is now the # of bytes we need not copy to the stack
2455 because registers will take care of them. */
2456
2457 if (partial != 0)
2458 xinner = change_address (xinner, BLKmode,
2459 plus_constant (XEXP (xinner, 0), used));
2460
2461 /* If the partial register-part of the arg counts in its stack size,
2462 skip the part of stack space corresponding to the registers.
2463 Otherwise, start copying to the beginning of the stack space,
2464 by setting SKIP to 0. */
2465 #ifndef REG_PARM_STACK_SPACE
2466 skip = 0;
2467 #else
2468 skip = used;
2469 #endif
2470
2471 #ifdef PUSH_ROUNDING
2472 /* Do it with several push insns if that doesn't take lots of insns
2473 and if there is no difficulty with push insns that skip bytes
2474 on the stack for alignment purposes. */
2475 if (args_addr == 0
2476 && GET_CODE (size) == CONST_INT
2477 && skip == 0
2478 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2479 < MOVE_RATIO)
2480 /* Here we avoid the case of a structure whose weak alignment
2481 forces many pushes of a small amount of data,
2482 and such small pushes do rounding that causes trouble. */
2483 && ((! SLOW_UNALIGNED_ACCESS)
2484 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2485 || PUSH_ROUNDING (align) == align)
2486 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2487 {
2488 /* Push padding now if padding above and stack grows down,
2489 or if padding below and stack grows up.
2490 But if space already allocated, this has already been done. */
2491 if (extra && args_addr == 0
2492 && where_pad != none && where_pad != stack_direction)
2493 anti_adjust_stack (GEN_INT (extra));
2494
2495 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2496 INTVAL (size) - used, align);
2497 }
2498 else
2499 #endif /* PUSH_ROUNDING */
2500 {
2501 /* Otherwise make space on the stack and copy the data
2502 to the address of that space. */
2503
2504 /* Deduct words put into registers from the size we must copy. */
2505 if (partial != 0)
2506 {
2507 if (GET_CODE (size) == CONST_INT)
2508 size = GEN_INT (INTVAL (size) - used);
2509 else
2510 size = expand_binop (GET_MODE (size), sub_optab, size,
2511 GEN_INT (used), NULL_RTX, 0,
2512 OPTAB_LIB_WIDEN);
2513 }
2514
2515 /* Get the address of the stack space.
2516 In this case, we do not deal with EXTRA separately.
2517 A single stack adjust will do. */
2518 if (! args_addr)
2519 {
2520 temp = push_block (size, extra, where_pad == downward);
2521 extra = 0;
2522 }
2523 else if (GET_CODE (args_so_far) == CONST_INT)
2524 temp = memory_address (BLKmode,
2525 plus_constant (args_addr,
2526 skip + INTVAL (args_so_far)));
2527 else
2528 temp = memory_address (BLKmode,
2529 plus_constant (gen_rtx (PLUS, Pmode,
2530 args_addr, args_so_far),
2531 skip));
2532
2533 /* TEMP is the address of the block. Copy the data there. */
2534 if (GET_CODE (size) == CONST_INT
2535 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2536 < MOVE_RATIO))
2537 {
2538 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2539 INTVAL (size), align);
2540 goto ret;
2541 }
2542 /* Try the most limited insn first, because there's no point
2543 including more than one in the machine description unless
2544 the more limited one has some advantage. */
2545 #ifdef HAVE_movstrqi
2546 if (HAVE_movstrqi
2547 && GET_CODE (size) == CONST_INT
2548 && ((unsigned) INTVAL (size)
2549 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2550 {
2551 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2552 xinner, size, GEN_INT (align));
2553 if (pat != 0)
2554 {
2555 emit_insn (pat);
2556 goto ret;
2557 }
2558 }
2559 #endif
2560 #ifdef HAVE_movstrhi
2561 if (HAVE_movstrhi
2562 && GET_CODE (size) == CONST_INT
2563 && ((unsigned) INTVAL (size)
2564 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2565 {
2566 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2567 xinner, size, GEN_INT (align));
2568 if (pat != 0)
2569 {
2570 emit_insn (pat);
2571 goto ret;
2572 }
2573 }
2574 #endif
2575 #ifdef HAVE_movstrsi
2576 if (HAVE_movstrsi)
2577 {
2578 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2579 xinner, size, GEN_INT (align));
2580 if (pat != 0)
2581 {
2582 emit_insn (pat);
2583 goto ret;
2584 }
2585 }
2586 #endif
2587 #ifdef HAVE_movstrdi
2588 if (HAVE_movstrdi)
2589 {
2590 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2591 xinner, size, GEN_INT (align));
2592 if (pat != 0)
2593 {
2594 emit_insn (pat);
2595 goto ret;
2596 }
2597 }
2598 #endif
2599
2600 #ifndef ACCUMULATE_OUTGOING_ARGS
2601 /* If the source is referenced relative to the stack pointer,
2602 copy it to another register to stabilize it. We do not need
2603 to do this if we know that we won't be changing sp. */
2604
2605 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2606 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2607 temp = copy_to_reg (temp);
2608 #endif
2609
2610 /* Make inhibit_defer_pop nonzero around the library call
2611 to force it to pop the bcopy arguments right away. */
2612 NO_DEFER_POP;
2613 #ifdef TARGET_MEM_FUNCTIONS
2614 emit_library_call (memcpy_libfunc, 0,
2615 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2616 convert_to_mode (TYPE_MODE (sizetype),
2617 size, TREE_UNSIGNED (sizetype)),
2618 TYPE_MODE (sizetype));
2619 #else
2620 emit_library_call (bcopy_libfunc, 0,
2621 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2622 convert_to_mode (TYPE_MODE (integer_type_node),
2623 size,
2624 TREE_UNSIGNED (integer_type_node)),
2625 TYPE_MODE (integer_type_node));
2626 #endif
2627 OK_DEFER_POP;
2628 }
2629 }
2630 else if (partial > 0)
2631 {
2632 /* Scalar partly in registers. */
2633
2634 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2635 int i;
2636 int not_stack;
2637 /* # words of start of argument
2638 that we must make space for but need not store. */
2639 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2640 int args_offset = INTVAL (args_so_far);
2641 int skip;
2642
2643 /* Push padding now if padding above and stack grows down,
2644 or if padding below and stack grows up.
2645 But if space already allocated, this has already been done. */
2646 if (extra && args_addr == 0
2647 && where_pad != none && where_pad != stack_direction)
2648 anti_adjust_stack (GEN_INT (extra));
2649
2650 /* If we make space by pushing it, we might as well push
2651 the real data. Otherwise, we can leave OFFSET nonzero
2652 and leave the space uninitialized. */
2653 if (args_addr == 0)
2654 offset = 0;
2655
2656 /* Now NOT_STACK gets the number of words that we don't need to
2657 allocate on the stack. */
2658 not_stack = partial - offset;
2659
2660 /* If the partial register-part of the arg counts in its stack size,
2661 skip the part of stack space corresponding to the registers.
2662 Otherwise, start copying to the beginning of the stack space,
2663 by setting SKIP to 0. */
2664 #ifndef REG_PARM_STACK_SPACE
2665 skip = 0;
2666 #else
2667 skip = not_stack;
2668 #endif
2669
2670 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2671 x = validize_mem (force_const_mem (mode, x));
2672
2673 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2674 SUBREGs of such registers are not allowed. */
2675 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2676 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2677 x = copy_to_reg (x);
2678
2679 /* Loop over all the words allocated on the stack for this arg. */
2680 /* We can do it by words, because any scalar bigger than a word
2681 has a size a multiple of a word. */
2682 #ifndef PUSH_ARGS_REVERSED
2683 for (i = not_stack; i < size; i++)
2684 #else
2685 for (i = size - 1; i >= not_stack; i--)
2686 #endif
2687 if (i >= not_stack + offset)
2688 emit_push_insn (operand_subword_force (x, i, mode),
2689 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2690 0, args_addr,
2691 GEN_INT (args_offset + ((i - not_stack + skip)
2692 * UNITS_PER_WORD)));
2693 }
2694 else
2695 {
2696 rtx addr;
2697
2698 /* Push padding now if padding above and stack grows down,
2699 or if padding below and stack grows up.
2700 But if space already allocated, this has already been done. */
2701 if (extra && args_addr == 0
2702 && where_pad != none && where_pad != stack_direction)
2703 anti_adjust_stack (GEN_INT (extra));
2704
2705 #ifdef PUSH_ROUNDING
2706 if (args_addr == 0)
2707 addr = gen_push_operand ();
2708 else
2709 #endif
2710 if (GET_CODE (args_so_far) == CONST_INT)
2711 addr
2712 = memory_address (mode,
2713 plus_constant (args_addr, INTVAL (args_so_far)));
2714 else
2715 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2716 args_so_far));
2717
2718 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2719 }
2720
2721 ret:
2722 /* If part should go in registers, copy that part
2723 into the appropriate registers. Do this now, at the end,
2724 since mem-to-mem copies above may do function calls. */
2725 if (partial > 0 && reg != 0)
2726 {
2727 /* Handle calls that pass values in multiple non-contiguous locations.
2728 The Irix 6 ABI has examples of this. */
2729 if (GET_CODE (reg) == PARALLEL)
2730 emit_group_load (reg, x);
2731 else
2732 move_block_to_reg (REGNO (reg), x, partial, mode);
2733 }
2734
2735 if (extra && args_addr == 0 && where_pad == stack_direction)
2736 anti_adjust_stack (GEN_INT (extra));
2737 }
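
/* A minimal usage sketch (hypothetical, not part of the build):
   pushing one SImode pseudo with no partial registers, no padding,
   assuming a target with real push insns (ARGS_ADDR == 0).  */
#if 0
emit_push_insn (gen_reg_rtx (SImode), SImode, NULL_TREE, NULL_RTX,
		GET_MODE_SIZE (SImode), 0, NULL_RTX, 0, NULL_RTX,
		const0_rtx);
#endif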
2738 \f
2739 /* Expand an assignment that stores the value of FROM into TO.
2740 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2741 (This may contain a QUEUED rtx;
2742 if the value is constant, this rtx is a constant.)
2743 Otherwise, the returned value is NULL_RTX.
2744
2745 SUGGEST_REG is no longer actually used.
2746 It used to mean, copy the value through a register
2747 and return that register, if that is possible.
2748 We now use WANT_VALUE to decide whether to do this. */
2749
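/* For instance, for the C statement `a = b = 0;' the inner assignment
   is expanded with WANT_VALUE nonzero so its result can feed the outer
   assignment, while a plain `a = 0;' statement passes
   WANT_VALUE == 0.  */
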
2750 rtx
2751 expand_assignment (to, from, want_value, suggest_reg)
2752 tree to, from;
2753 int want_value;
2754 int suggest_reg;
2755 {
2756 register rtx to_rtx = 0;
2757 rtx result;
2758
2759 /* Don't crash if the lhs of the assignment was erroneous. */
2760
2761 if (TREE_CODE (to) == ERROR_MARK)
2762 {
2763 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2764 return want_value ? result : NULL_RTX;
2765 }
2766
2767 if (output_bytecode)
2768 {
2769 tree dest_innermost;
2770
2771 bc_expand_expr (from);
2772 bc_emit_instruction (duplicate);
2773
2774 dest_innermost = bc_expand_address (to);
2775
2776 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2777 take care of it here. */
2778
2779 bc_store_memory (TREE_TYPE (to), dest_innermost);
2780 return NULL;
2781 }
2782
2783 /* Assignment of a structure component needs special treatment
2784 if the structure component's rtx is not simply a MEM.
2785 Assignment of an array element at a constant index, and assignment of
2786 an array element in an unaligned packed structure field, has the same
2787 problem. */
2788
2789 if (TREE_CODE (to) == COMPONENT_REF
2790 || TREE_CODE (to) == BIT_FIELD_REF
2791 || (TREE_CODE (to) == ARRAY_REF
2792 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2793 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2794 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2795 {
2796 enum machine_mode mode1;
2797 int bitsize;
2798 int bitpos;
2799 tree offset;
2800 int unsignedp;
2801 int volatilep = 0;
2802 tree tem;
2803 int alignment;
2804
2805 push_temp_slots ();
2806 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2807 &unsignedp, &volatilep, &alignment);
2808
2809 /* If we are going to use store_bit_field and extract_bit_field,
2810 make sure to_rtx will be safe for multiple use. */
2811
2812 if (mode1 == VOIDmode && want_value)
2813 tem = stabilize_reference (tem);
2814
2815 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2816 if (offset != 0)
2817 {
2818 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2819
2820 if (GET_CODE (to_rtx) != MEM)
2821 abort ();
2822 to_rtx = change_address (to_rtx, VOIDmode,
2823 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2824 force_reg (ptr_mode, offset_rtx)));
2825 }
2826 if (volatilep)
2827 {
2828 if (GET_CODE (to_rtx) == MEM)
2829 {
2830 /* When the offset is zero, to_rtx is the address of the
2831 structure we are storing into, and hence may be shared.
2832 We must make a new MEM before setting the volatile bit. */
2833 if (offset == 0)
2834 to_rtx = copy_rtx (to_rtx);
2835
2836 MEM_VOLATILE_P (to_rtx) = 1;
2837 }
2838 #if 0 /* This was turned off because, when a field is volatile
2839 in an object which is not volatile, the object may be in a register,
2840 and then we would abort over here. */
2841 else
2842 abort ();
2843 #endif
2844 }
2845
2846 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2847 (want_value
2848 /* Spurious cast makes HPUX compiler happy. */
2849 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2850 : VOIDmode),
2851 unsignedp,
2852 /* Required alignment of containing datum. */
2853 alignment,
2854 int_size_in_bytes (TREE_TYPE (tem)));
2855 preserve_temp_slots (result);
2856 free_temp_slots ();
2857 pop_temp_slots ();
2858
2859 /* If the value is meaningful, convert RESULT to the proper mode.
2860 Otherwise, return nothing. */
2861 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2862 TYPE_MODE (TREE_TYPE (from)),
2863 result,
2864 TREE_UNSIGNED (TREE_TYPE (to)))
2865 : NULL_RTX);
2866 }
2867
2868 /* If the rhs is a function call and its value is not an aggregate,
2869 call the function before we start to compute the lhs.
2870 This is needed for correct code for cases such as
2871 val = setjmp (buf) on machines where reference to val
2872 requires loading up part of an address in a separate insn.
2873
2874 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2875 a promoted variable where the zero- or sign-extension needs to be done.
2876 Handling this in the normal way is safe because no computation is done
2877 before the call. */
2878 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2879 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2880 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2881 {
2882 rtx value;
2883
2884 push_temp_slots ();
2885 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2886 if (to_rtx == 0)
2887 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2888
2889 /* Handle calls that return values in multiple non-contiguous locations.
2890 The Irix 6 ABI has examples of this. */
2891 if (GET_CODE (to_rtx) == PARALLEL)
2892 emit_group_load (to_rtx, value);
2893 else if (GET_MODE (to_rtx) == BLKmode)
2894 emit_block_move (to_rtx, value, expr_size (from),
2895 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2896 else
2897 emit_move_insn (to_rtx, value);
2898 preserve_temp_slots (to_rtx);
2899 free_temp_slots ();
2900 pop_temp_slots ();
2901 return want_value ? to_rtx : NULL_RTX;
2902 }
2903
2904 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2905 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2906
2907 if (to_rtx == 0)
2908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2909
2910 /* Don't move directly into a return register. */
2911 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2912 {
2913 rtx temp;
2914
2915 push_temp_slots ();
2916 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2917 emit_move_insn (to_rtx, temp);
2918 preserve_temp_slots (to_rtx);
2919 free_temp_slots ();
2920 pop_temp_slots ();
2921 return want_value ? to_rtx : NULL_RTX;
2922 }
2923
2924 /* In case we are returning the contents of an object which overlaps
2925 the place the value is being stored, use a safe function when copying
2926 a value through a pointer into a structure value return block. */
2927 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2928 && current_function_returns_struct
2929 && !current_function_returns_pcc_struct)
2930 {
2931 rtx from_rtx, size;
2932
2933 push_temp_slots ();
2934 size = expr_size (from);
2935 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2936
2937 #ifdef TARGET_MEM_FUNCTIONS
2938 emit_library_call (memcpy_libfunc, 0,
2939 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2940 XEXP (from_rtx, 0), Pmode,
2941 convert_to_mode (TYPE_MODE (sizetype),
2942 size, TREE_UNSIGNED (sizetype)),
2943 TYPE_MODE (sizetype));
2944 #else
2945 emit_library_call (bcopy_libfunc, 0,
2946 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2947 XEXP (to_rtx, 0), Pmode,
2948 convert_to_mode (TYPE_MODE (integer_type_node),
2949 size, TREE_UNSIGNED (integer_type_node)),
2950 TYPE_MODE (integer_type_node));
2951 #endif
2952
2953 preserve_temp_slots (to_rtx);
2954 free_temp_slots ();
2955 pop_temp_slots ();
2956 return want_value ? to_rtx : NULL_RTX;
2957 }
2958
2959 /* Compute FROM and store the value in the rtx we got. */
2960
2961 push_temp_slots ();
2962 result = store_expr (from, to_rtx, want_value);
2963 preserve_temp_slots (result);
2964 free_temp_slots ();
2965 pop_temp_slots ();
2966 return want_value ? result : NULL_RTX;
2967 }
2968
2969 /* Generate code for computing expression EXP,
2970 and storing the value into TARGET.
2971 TARGET may contain a QUEUED rtx.
2972
2973 If WANT_VALUE is nonzero, return a copy of the value
2974 not in TARGET, so that we can be sure to use the proper
2975 value in a containing expression even if TARGET has something
2976 else stored in it. If possible, we copy the value through a pseudo
2977 and return that pseudo. Or, if the value is constant, we try to
2978 return the constant. In some cases, we return a pseudo
2979 copied *from* TARGET.
2980
2981 If the mode is BLKmode then we may return TARGET itself.
2982 It turns out that in BLKmode it doesn't cause a problem,
2983 because C has no operators that could combine two different
2984 assignments into the same BLKmode object with different values
2985 with no sequence point. Will other languages need this to
2986 be more thorough?
2987
2988 If WANT_VALUE is 0, we return NULL, to make sure
2989 to catch quickly any cases where the caller uses the value
2990 and fails to set WANT_VALUE. */
2991
2992 rtx
2993 store_expr (exp, target, want_value)
2994 register tree exp;
2995 register rtx target;
2996 int want_value;
2997 {
2998 register rtx temp;
2999 int dont_return_target = 0;
3000
3001 if (TREE_CODE (exp) == COMPOUND_EXPR)
3002 {
3003 /* Perform first part of compound expression, then assign from second
3004 part. */
3005 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3006 emit_queue ();
3007 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3008 }
3009 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3010 {
3011 /* For conditional expression, get safe form of the target. Then
3012 test the condition, doing the appropriate assignment on either
3013 side. This avoids the creation of unnecessary temporaries.
3014 For non-BLKmode, it is more efficient not to do this. */
3015
3016 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3017 rtx flag = NULL_RTX;
3018 tree left_cleanups = NULL_TREE;
3019 tree right_cleanups = NULL_TREE;
3020 tree old_cleanups = cleanups_this_call;
3021
3022 /* Used to save a pointer to the place to put the setting of
3023 the flag that indicates if this side of the conditional was
3024 taken. We backpatch the code if we find out later that we
3025 have any conditional cleanups that need to be performed. */
3026 rtx dest_right_flag = NULL_RTX;
3027 rtx dest_left_flag = NULL_RTX;
3028
3029 emit_queue ();
3030 target = protect_from_queue (target, 1);
3031
3032 do_pending_stack_adjust ();
3033 NO_DEFER_POP;
3034 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3035 store_expr (TREE_OPERAND (exp, 1), target, 0);
3036 dest_left_flag = get_last_insn ();
3037 /* Handle conditional cleanups, if any. */
3038 left_cleanups = defer_cleanups_to (old_cleanups);
3039 emit_queue ();
3040 emit_jump_insn (gen_jump (lab2));
3041 emit_barrier ();
3042 emit_label (lab1);
3043 store_expr (TREE_OPERAND (exp, 2), target, 0);
3044 dest_right_flag = get_last_insn ();
3045 /* Handle conditional cleanups, if any. */
3046 right_cleanups = defer_cleanups_to (old_cleanups);
3047 emit_queue ();
3048 emit_label (lab2);
3049 OK_DEFER_POP;
3050
3051 /* Add back in any conditional cleanups. */
3052 if (left_cleanups || right_cleanups)
3053 {
3054 tree new_cleanups;
3055 tree cond;
3056 rtx last;
3057
3058 /* Now that we know that a flag is needed, go back and add in the
3059 setting of the flag. */
3060
3061 flag = gen_reg_rtx (word_mode);
3062
3063 /* Do the left side flag. */
3064 last = get_last_insn ();
3065 /* Flag left cleanups as needed. */
3066 emit_move_insn (flag, const1_rtx);
3067 /* ??? deprecated, use sequences instead. */
3068 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3069
3070 /* Do the right side flag. */
3071 last = get_last_insn ();
3072 /* Flag right cleanups as needed. */
3073 emit_move_insn (flag, const0_rtx);
3074 /* ??? deprecated, use sequences instead. */
3075 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3076
3077 /* All cleanups must be on the function_obstack. */
3078 push_obstacks_nochange ();
3079 resume_temporary_allocation ();
3080
3081 /* Convert FLAG, which is an rtx, into a tree. */
3082 cond = make_node (RTL_EXPR);
3083 TREE_TYPE (cond) = integer_type_node;
3084 RTL_EXPR_RTL (cond) = flag;
3085 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3086 cond = save_expr (cond);
3087
3088 if (! left_cleanups)
3089 left_cleanups = integer_zero_node;
3090 if (! right_cleanups)
3091 right_cleanups = integer_zero_node;
3092 new_cleanups = build (COND_EXPR, void_type_node,
3093 truthvalue_conversion (cond),
3094 left_cleanups, right_cleanups);
3095 new_cleanups = fold (new_cleanups);
3096
3097 pop_obstacks ();
3098
3099 /* Now add in the conditionalized cleanups. */
3100 cleanups_this_call
3101 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3102 expand_eh_region_start ();
3103 }
3104 return want_value ? target : NULL_RTX;
3105 }
3106 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3107 && GET_MODE (target) != BLKmode)
3108 /* If target is in memory and caller wants value in a register instead,
3109 arrange that. Pass TARGET as target for expand_expr so that,
3110 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3111 We know expand_expr will not use the target in that case.
3112 Don't do this if TARGET is volatile because we are supposed
3113 to write it and then read it. */
3114 {
3115 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3116 GET_MODE (target), 0);
3117 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3118 temp = copy_to_reg (temp);
3119 dont_return_target = 1;
3120 }
3121 else if (queued_subexp_p (target))
3122 /* If target contains a postincrement, let's not risk
3123 using it as the place to generate the rhs. */
3124 {
3125 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3126 {
3127 /* Expand EXP into a new pseudo. */
3128 temp = gen_reg_rtx (GET_MODE (target));
3129 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3130 }
3131 else
3132 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3133
3134 /* If target is volatile, ANSI requires accessing the value
3135 *from* the target, if it is accessed. So make that happen.
3136 In no case return the target itself. */
3137 if (! MEM_VOLATILE_P (target) && want_value)
3138 dont_return_target = 1;
3139 }
3140 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3141 /* If this is a scalar in a register that is stored in a wider mode
3142 than the declared mode, compute the result into its declared mode
3143 and then convert to the wider mode. Our value is the computed
3144 expression. */
3145 {
3146 /* If we don't want a value, we can do the conversion inside EXP,
3147 which will often result in some optimizations. Do the conversion
3148 in two steps: first change the signedness, if needed, then
3149 the extend. But don't do this if the type of EXP is a subtype
3150 of something else since then the conversion might involve
3151 more than just converting modes. */
3152 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3153 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3154 {
3155 if (TREE_UNSIGNED (TREE_TYPE (exp))
3156 != SUBREG_PROMOTED_UNSIGNED_P (target))
3157 exp
3158 = convert
3159 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3160 TREE_TYPE (exp)),
3161 exp);
3162
3163 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3164 SUBREG_PROMOTED_UNSIGNED_P (target)),
3165 exp);
3166 }
3167
3168 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3169
3170 /* If TEMP is a volatile MEM and we want a result value, make
3171 the access now so it gets done only once. Likewise if
3172 it contains TARGET. */
3173 if (GET_CODE (temp) == MEM && want_value
3174 && (MEM_VOLATILE_P (temp)
3175 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3176 temp = copy_to_reg (temp);
3177
3178 /* If TEMP is a VOIDmode constant, use convert_modes to make
3179 sure that we properly convert it. */
3180 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3181 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3182 TYPE_MODE (TREE_TYPE (exp)), temp,
3183 SUBREG_PROMOTED_UNSIGNED_P (target));
3184
3185 convert_move (SUBREG_REG (target), temp,
3186 SUBREG_PROMOTED_UNSIGNED_P (target));
3187 return want_value ? temp : NULL_RTX;
3188 }
3189 else
3190 {
3191 temp = expand_expr (exp, target, GET_MODE (target), 0);
3192 /* Return TARGET if it's a specified hardware register.
3193 If TARGET is a volatile mem ref, either return TARGET
3194 or return a reg copied *from* TARGET; ANSI requires this.
3195
3196 Otherwise, if TEMP is not TARGET, return TEMP
3197 if it is constant (for efficiency),
3198 or if we really want the correct value. */
3199 if (!(target && GET_CODE (target) == REG
3200 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3201 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3202 && ! rtx_equal_p (temp, target)
3203 && (CONSTANT_P (temp) || want_value))
3204 dont_return_target = 1;
3205 }
3206
3207 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3208 the same as that of TARGET, adjust the constant. This is needed, for
3209 example, in case it is a CONST_DOUBLE and we want only a word-sized
3210 value. */
3211 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3212 && TREE_CODE (exp) != ERROR_MARK
3213 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3214 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3215 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3216
3217 /* If value was not generated in the target, store it there.
3218 Convert the value to TARGET's type first if necessary. */
3219
3220 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3221 {
3222 target = protect_from_queue (target, 1);
3223 if (GET_MODE (temp) != GET_MODE (target)
3224 && GET_MODE (temp) != VOIDmode)
3225 {
3226 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3227 if (dont_return_target)
3228 {
3229 /* In this case, we will return TEMP,
3230 so make sure it has the proper mode.
3231 But don't forget to store the value into TARGET. */
3232 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3233 emit_move_insn (target, temp);
3234 }
3235 else
3236 convert_move (target, temp, unsignedp);
3237 }
3238
3239 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3240 {
3241 /* Handle copying a string constant into an array.
3242 The string constant may be shorter than the array.
3243 So copy just the string's actual length, and clear the rest. */
3244 rtx size;
3245 rtx addr;
3246
3247 /* Get the size of the data type of the string,
3248 which is actually the size of the target. */
3249 size = expr_size (exp);
3250 if (GET_CODE (size) == CONST_INT
3251 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3252 emit_block_move (target, temp, size,
3253 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3254 else
3255 {
3256 /* Compute the size of the data to copy from the string. */
3257 tree copy_size
3258 = size_binop (MIN_EXPR,
3259 make_tree (sizetype, size),
3260 convert (sizetype,
3261 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3262 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3263 VOIDmode, 0);
3264 rtx label = 0;
3265
3266 /* Copy that much. */
3267 emit_block_move (target, temp, copy_size_rtx,
3268 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3269
3270 /* Figure out how much is left in TARGET that we have to clear.
3271 Do all calculations in ptr_mode. */
3272
3273 addr = XEXP (target, 0);
3274 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3275
3276 if (GET_CODE (copy_size_rtx) == CONST_INT)
3277 {
3278 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3279 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3280 }
3281 else
3282 {
3283 addr = force_reg (ptr_mode, addr);
3284 addr = expand_binop (ptr_mode, add_optab, addr,
3285 copy_size_rtx, NULL_RTX, 0,
3286 OPTAB_LIB_WIDEN);
3287
3288 size = expand_binop (ptr_mode, sub_optab, size,
3289 copy_size_rtx, NULL_RTX, 0,
3290 OPTAB_LIB_WIDEN);
3291
3292 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3293 GET_MODE (size), 0, 0);
3294 label = gen_label_rtx ();
3295 emit_jump_insn (gen_blt (label));
3296 }
3297
3298 if (size != const0_rtx)
3299 {
3300 #ifdef TARGET_MEM_FUNCTIONS
3301 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3302 addr, ptr_mode,
3303 const0_rtx, TYPE_MODE (integer_type_node),
3304 convert_to_mode (TYPE_MODE (sizetype),
3305 size,
3306 TREE_UNSIGNED (sizetype)),
3307 TYPE_MODE (sizetype));
3308 #else
3309 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3310 addr, ptr_mode,
3311 convert_to_mode (TYPE_MODE (integer_type_node),
3312 size,
3313 TREE_UNSIGNED (integer_type_node)),
3314 TYPE_MODE (integer_type_node));
3315 #endif
3316 }
3317
3318 if (label)
3319 emit_label (label);
3320 }
3321 }
3322 /* Handle calls that return values in multiple non-contiguous locations.
3323 The Irix 6 ABI has examples of this. */
3324 else if (GET_CODE (target) == PARALLEL)
3325 emit_group_load (target, temp);
3326 else if (GET_MODE (temp) == BLKmode)
3327 emit_block_move (target, temp, expr_size (exp),
3328 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3329 else
3330 emit_move_insn (target, temp);
3331 }
3332
3333 /* If we don't want a value, return NULL_RTX. */
3334 if (! want_value)
3335 return NULL_RTX;
3336
3337 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3338 ??? The latter test doesn't seem to make sense. */
3339 else if (dont_return_target && GET_CODE (temp) != MEM)
3340 return temp;
3341
3342 /* Return TARGET itself if it is a hard register. */
3343 else if (want_value && GET_MODE (target) != BLKmode
3344 && ! (GET_CODE (target) == REG
3345 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3346 return copy_to_reg (target);
3347
3348 else
3349 return target;
3350 }
3351 \f
3352 /* Return 1 if EXP just contains zeros. */
3353
3354 static int
3355 is_zeros_p (exp)
3356 tree exp;
3357 {
3358 tree elt;
3359
3360 switch (TREE_CODE (exp))
3361 {
3362 case CONVERT_EXPR:
3363 case NOP_EXPR:
3364 case NON_LVALUE_EXPR:
3365 return is_zeros_p (TREE_OPERAND (exp, 0));
3366
3367 case INTEGER_CST:
3368 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3369
3370 case COMPLEX_CST:
3371 return
3372 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3373
3374 case REAL_CST:
3375 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3376
3377 case CONSTRUCTOR:
3378 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3379 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3380 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3381 if (! is_zeros_p (TREE_VALUE (elt)))
3382 return 0;
3383
3384 return 1;
3385 }
3386
3387 return 0;
3388 }
3389
3390 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3391
3392 static int
3393 mostly_zeros_p (exp)
3394 tree exp;
3395 {
3396 if (TREE_CODE (exp) == CONSTRUCTOR)
3397 {
3398 int elts = 0, zeros = 0;
3399 tree elt = CONSTRUCTOR_ELTS (exp);
3400 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3401 {
3402 /* If there are no ranges of true bits, it is all zero. */
3403 return elt == NULL_TREE;
3404 }
3405 for (; elt; elt = TREE_CHAIN (elt))
3406 {
3407 /* We do not handle the case where the index is a RANGE_EXPR,
3408 so the statistic will be somewhat inaccurate.
3409 We do make a more accurate count in store_constructor itself,
3410 and since this function is only used for nested array elements,
3411 this should be close enough. */
3412 if (mostly_zeros_p (TREE_VALUE (elt)))
3413 zeros++;
3414 elts++;
3415 }
3416
3417 return 4 * zeros >= 3 * elts;
3418 }
3419
3420 return is_zeros_p (exp);
3421 }
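
/* E.g. a CONSTRUCTOR with 5 zero elements out of 8 fails the test
   above (4*5 == 20 < 3*8 == 24), while one with 6 of 8 passes
   (4*6 == 24 >= 24).  */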
3422 \f
3423 /* Helper function for store_constructor.
3424 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3425 TYPE is the type of the CONSTRUCTOR, not the element type.
3426 CLEARED is as for store_constructor.
3427
3428 This provides a recursive shortcut back to store_constructor when it isn't
3429 necessary to go through store_field. This is so that we can pass through
3430 the cleared field to let store_constructor know that we may not have to
3431 clear a substructure if the outer structure has already been cleared. */
3432
3433 static void
3434 store_constructor_field (target, bitsize, bitpos,
3435 mode, exp, type, cleared)
3436 rtx target;
3437 int bitsize, bitpos;
3438 enum machine_mode mode;
3439 tree exp, type;
3440 int cleared;
3441 {
3442 if (TREE_CODE (exp) == CONSTRUCTOR
3443 && bitpos % BITS_PER_UNIT == 0
3444 /* If we have a non-zero bitpos for a register target, then we just
3445 let store_field do the bitfield handling. This is unlikely to
3446 generate unnecessary clear instructions anyway. */
3447 && (bitpos == 0 || GET_CODE (target) == MEM))
3448 {
3449 if (bitpos != 0)
3450 target = change_address (target, VOIDmode,
3451 plus_constant (XEXP (target, 0),
3452 bitpos / BITS_PER_UNIT));
3453 store_constructor (exp, target, cleared);
3454 }
3455 else
3456 store_field (target, bitsize, bitpos, mode, exp,
3457 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3458 int_size_in_bytes (type));
3459 }
3460
3461 /* Store the value of constructor EXP into the rtx TARGET.
3462 TARGET is either a REG or a MEM.
3463 CLEARED is true if TARGET is known to have been zero'd. */
3464
3465 static void
3466 store_constructor (exp, target, cleared)
3467 tree exp;
3468 rtx target;
3469 int cleared;
3470 {
3471 tree type = TREE_TYPE (exp);
3472
3473 /* We know our target cannot conflict, since safe_from_p has been called. */
3474 #if 0
3475 /* Don't try copying piece by piece into a hard register
3476 since that is vulnerable to being clobbered by EXP.
3477 Instead, construct in a pseudo register and then copy it all. */
3478 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3479 {
3480 rtx temp = gen_reg_rtx (GET_MODE (target));
3481 store_constructor (exp, temp, 0);
3482 emit_move_insn (target, temp);
3483 return;
3484 }
3485 #endif
3486
3487 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3488 || TREE_CODE (type) == QUAL_UNION_TYPE)
3489 {
3490 register tree elt;
3491
3492 /* Inform later passes that the whole union value is dead. */
3493 if (TREE_CODE (type) == UNION_TYPE
3494 || TREE_CODE (type) == QUAL_UNION_TYPE)
3495 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3496
3497 /* If we are building a static constructor into a register,
3498 set the initial value as zero so we can fold the value into
3499 a constant. But if more than one register is involved,
3500 this probably loses. */
3501 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3502 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3503 {
3504 if (! cleared)
3505 emit_move_insn (target, const0_rtx);
3506
3507 cleared = 1;
3508 }
3509
3510 /* If the constructor has fewer fields than the structure
3511 or if we are initializing the structure to mostly zeros,
3512 clear the whole structure first. */
3513 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3514 != list_length (TYPE_FIELDS (type)))
3515 || mostly_zeros_p (exp))
3516 {
3517 if (! cleared)
3518 clear_storage (target, expr_size (exp),
3519 TYPE_ALIGN (type) / BITS_PER_UNIT);
3520
3521 cleared = 1;
3522 }
3523 else
3524 /* Inform later passes that the old value is dead. */
3525 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3526
3527 /* Store each element of the constructor into
3528 the corresponding field of TARGET. */
3529
3530 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3531 {
3532 register tree field = TREE_PURPOSE (elt);
3533 register enum machine_mode mode;
3534 int bitsize;
3535 int bitpos = 0;
3536 int unsignedp;
3537 tree pos, constant = 0, offset = 0;
3538 rtx to_rtx = target;
3539
3540 /* Just ignore missing fields.
3541 We cleared the whole structure, above,
3542 if any fields are missing. */
3543 if (field == 0)
3544 continue;
3545
3546 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3547 continue;
3548
3549 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3550 unsignedp = TREE_UNSIGNED (field);
3551 mode = DECL_MODE (field);
3552 if (DECL_BIT_FIELD (field))
3553 mode = VOIDmode;
3554
3555 pos = DECL_FIELD_BITPOS (field);
3556 if (TREE_CODE (pos) == INTEGER_CST)
3557 constant = pos;
3558 else if (TREE_CODE (pos) == PLUS_EXPR
3559 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3560 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3561 else
3562 offset = pos;
3563
3564 if (constant)
3565 bitpos = TREE_INT_CST_LOW (constant);
3566
3567 if (offset)
3568 {
3569 rtx offset_rtx;
3570
3571 if (contains_placeholder_p (offset))
3572 offset = build (WITH_RECORD_EXPR, sizetype,
3573 offset, exp);
3574
3575 offset = size_binop (FLOOR_DIV_EXPR, offset,
3576 size_int (BITS_PER_UNIT));
3577
3578 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3579 if (GET_CODE (to_rtx) != MEM)
3580 abort ();
3581
3582 to_rtx
3583 = change_address (to_rtx, VOIDmode,
3584 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3585 force_reg (ptr_mode, offset_rtx)));
3586 }
3587 if (TREE_READONLY (field))
3588 {
3589 if (GET_CODE (to_rtx) == MEM)
3590 to_rtx = copy_rtx (to_rtx);
3591
3592 RTX_UNCHANGING_P (to_rtx) = 1;
3593 }
3594
3595 store_constructor_field (to_rtx, bitsize, bitpos,
3596 mode, TREE_VALUE (elt), type, cleared);
3597 }
3598 }
3599 else if (TREE_CODE (type) == ARRAY_TYPE)
3600 {
3601 register tree elt;
3602 register int i;
3603 int need_to_clear;
3604 tree domain = TYPE_DOMAIN (type);
3605 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3606 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3607 tree elttype = TREE_TYPE (type);
3608
3609 /* If the constructor has fewer elements than the array,
3610 clear the whole array first. Similarly if this is a
3611 static constructor of a non-BLKmode object. */
3612 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3613 need_to_clear = 1;
3614 else
3615 {
3616 HOST_WIDE_INT count = 0, zero_count = 0;
3617 need_to_clear = 0;
3618 /* This loop is a more accurate version of the loop in
3619 mostly_zeros_p (it handles RANGE_EXPR in an index).
3620 It is also needed to check for missing elements. */
3621 for (elt = CONSTRUCTOR_ELTS (exp);
3622 elt != NULL_TREE;
3623 elt = TREE_CHAIN (elt))
3624 {
3625 tree index = TREE_PURPOSE (elt);
3626 HOST_WIDE_INT this_node_count;
3627 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3628 {
3629 tree lo_index = TREE_OPERAND (index, 0);
3630 tree hi_index = TREE_OPERAND (index, 1);
3631 if (TREE_CODE (lo_index) != INTEGER_CST
3632 || TREE_CODE (hi_index) != INTEGER_CST)
3633 {
3634 need_to_clear = 1;
3635 break;
3636 }
3637 this_node_count = TREE_INT_CST_LOW (hi_index)
3638 - TREE_INT_CST_LOW (lo_index) + 1;
3639 }
3640 else
3641 this_node_count = 1;
3642 count += this_node_count;
3643 if (mostly_zeros_p (TREE_VALUE (elt)))
3644 zero_count += this_node_count;
3645 }
3646 /* Clear the entire array first if there are any missing elements,
3647 or if the incidence of zero elements is >= 75%. */
3648 if (count < maxelt - minelt + 1
3649 || 4 * zero_count >= 3 * count)
3650 need_to_clear = 1;
3651 }
3652 if (need_to_clear)
3653 {
3654 if (! cleared)
3655 clear_storage (target, expr_size (exp),
3656 TYPE_ALIGN (type) / BITS_PER_UNIT);
3657 cleared = 1;
3658 }
3659 else
3660 /* Inform later passes that the old value is dead. */
3661 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3662
3663 /* Store each element of the constructor into
3664 the corresponding element of TARGET, determined
3665 by counting the elements. */
3666 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3667 elt;
3668 elt = TREE_CHAIN (elt), i++)
3669 {
3670 register enum machine_mode mode;
3671 int bitsize;
3672 int bitpos;
3673 int unsignedp;
3674 tree value = TREE_VALUE (elt);
3675 tree index = TREE_PURPOSE (elt);
3676 rtx xtarget = target;
3677
3678 if (cleared && is_zeros_p (value))
3679 continue;
3680
3681 mode = TYPE_MODE (elttype);
3682 bitsize = GET_MODE_BITSIZE (mode);
3683 unsignedp = TREE_UNSIGNED (elttype);
3684
3685 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3686 {
3687 tree lo_index = TREE_OPERAND (index, 0);
3688 tree hi_index = TREE_OPERAND (index, 1);
3689 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3690 struct nesting *loop;
3691 HOST_WIDE_INT lo, hi, count;
3692 tree position;
3693
3694 /* If the range is constant and "small", unroll the loop:
"small" means the target is not in memory, or the range has at
most two elements, or the unrolled stores cover at most
40 * 8 bits (40 bytes) of element data. */
3695 if (TREE_CODE (lo_index) == INTEGER_CST
3696 && TREE_CODE (hi_index) == INTEGER_CST
3697 && (lo = TREE_INT_CST_LOW (lo_index),
3698 hi = TREE_INT_CST_LOW (hi_index),
3699 count = hi - lo + 1,
3700 (GET_CODE (target) != MEM
3701 || count <= 2
3702 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3703 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3704 <= 40 * 8))))
3705 {
3706 lo -= minelt; hi -= minelt;
3707 for (; lo <= hi; lo++)
3708 {
3709 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3710 store_constructor_field (target, bitsize, bitpos,
3711 mode, value, type, cleared);
3712 }
3713 }
3714 else
3715 {
3716 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3717 loop_top = gen_label_rtx ();
3718 loop_end = gen_label_rtx ();
3719
3720 unsignedp = TREE_UNSIGNED (domain);
3721
3722 index = build_decl (VAR_DECL, NULL_TREE, domain);
3723
3724 DECL_RTL (index) = index_r
3725 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3726 &unsignedp, 0));
3727
3728 if (TREE_CODE (value) == SAVE_EXPR
3729 && SAVE_EXPR_RTL (value) == 0)
3730 {
3731 /* Make sure value gets expanded once before the
3732 loop. */
3733 expand_expr (value, const0_rtx, VOIDmode, 0);
3734 emit_queue ();
3735 }
3736 store_expr (lo_index, index_r, 0);
3737 loop = expand_start_loop (0);
3738
3739 /* Assign value to element index. */
3740 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3741 size_int (BITS_PER_UNIT));
3742 position = size_binop (MULT_EXPR,
3743 size_binop (MINUS_EXPR, index,
3744 TYPE_MIN_VALUE (domain)),
3745 position);
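/* POSITION is now the byte offset of element INDEX from the
   start of the array.  */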
3746 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3747 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3748 xtarget = change_address (target, mode, addr);
3749 if (TREE_CODE (value) == CONSTRUCTOR)
3750 store_constructor (value, xtarget, cleared);
3751 else
3752 store_expr (value, xtarget, 0);
3753
3754 expand_exit_loop_if_false (loop,
3755 build (LT_EXPR, integer_type_node,
3756 index, hi_index));
3757
3758 expand_increment (build (PREINCREMENT_EXPR,
3759 TREE_TYPE (index),
3760 index, integer_one_node), 0, 0);
3761 expand_end_loop ();
3762 emit_label (loop_end);
3763
3764 /* Needed by stupid register allocation, to extend the
3765 lifetime of pseudo-regs used by target past the end
3766 of the loop. */
3767 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3768 }
3769 }
3770 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3771 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3772 {
3773 rtx pos_rtx, addr;
3774 tree position;
3775
3776 if (index == 0)
3777 index = size_int (i);
3778
3779 if (minelt)
3780 index = size_binop (MINUS_EXPR, index,
3781 TYPE_MIN_VALUE (domain));
3782 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3783 size_int (BITS_PER_UNIT));
3784 position = size_binop (MULT_EXPR, index, position);
3785 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3786 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3787 xtarget = change_address (target, mode, addr);
3788 store_expr (value, xtarget, 0);
3789 }
3790 else
3791 {
3792 if (index != 0)
3793 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3794 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3795 else
3796 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3797 store_constructor_field (target, bitsize, bitpos,
3798 mode, value, type, cleared);
3799 }
3800 }
3801 }
3802 /* set constructor assignments */
3803 else if (TREE_CODE (type) == SET_TYPE)
3804 {
3805 tree elt = CONSTRUCTOR_ELTS (exp);
3806 rtx xtarget = XEXP (target, 0);
3807 int set_word_size = TYPE_ALIGN (type);
3808 int nbytes = int_size_in_bytes (type), nbits;
3809 tree domain = TYPE_DOMAIN (type);
3810 tree domain_min, domain_max, bitlength;
3811
3812 /* The default implementation strategy is to extract the constant
3813 parts of the constructor, use that to initialize the target,
3814 and then "or" in whatever non-constant ranges we need in addition.
3815
3816 If a large set is all zero or all ones, it is
3817 probably better to set it using memset (if available) or bzero.
3818 Also, if a large set has just a single range, it may be
3819 better to first clear the whole set (using
3820 bzero/memset), and then set the bits we want. */
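/* E.g. for a set with domain 0..31, a constant range [2..4] in the
   constructor means bits 2 through 4 of a 32-bit word get set.  */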
3821
3822 /* Check for all zeros. */
3823 if (elt == NULL_TREE)
3824 {
3825 if (!cleared)
3826 clear_storage (target, expr_size (exp),
3827 TYPE_ALIGN (type) / BITS_PER_UNIT);
3828 return;
3829 }
3830
3831 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3832 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3833 bitlength = size_binop (PLUS_EXPR,
3834 size_binop (MINUS_EXPR, domain_max, domain_min),
3835 size_one_node);
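/* BITLENGTH is DOMAIN_MAX - DOMAIN_MIN + 1, the number of bits
   needed to represent the whole set.  */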
3836
3837 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3838 abort ();
3839 nbits = TREE_INT_CST_LOW (bitlength);
3840
3841 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3842 are "complicated" (more than one range), initialize (the
3843 constant parts) by copying from a constant. */
3844 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3845 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3846 {
3847 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3848 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3849 char *bit_buffer = (char *) alloca (nbits);
3850 HOST_WIDE_INT word = 0;
3851 int bit_pos = 0;
3852 int ibit = 0;
3853 int offset = 0; /* In bytes from beginning of set. */
3854 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
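/* BIT_BUFFER now holds one char per bit of the set.  Pack those
   bits, SET_WORD_SIZE at a time, into WORD, and store each word
   that needs storing.  */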
3855 for (;;)
3856 {
3857 if (bit_buffer[ibit])
3858 {
3859 if (BYTES_BIG_ENDIAN)
3860 word |= (1 << (set_word_size - 1 - bit_pos));
3861 else
3862 word |= 1 << bit_pos;
3863 }
3864 bit_pos++; ibit++;
3865 if (bit_pos >= set_word_size || ibit == nbits)
3866 {
3867 if (word != 0 || ! cleared)
3868 {
3869 rtx datum = GEN_INT (word);
3870 rtx to_rtx;
3871 /* The assumption here is that it is safe to use
3872 XEXP if the set is multi-word, but not if
3873 it's single-word. */
3874 if (GET_CODE (target) == MEM)
3875 {
3876 to_rtx = plus_constant (XEXP (target, 0), offset);
3877 to_rtx = change_address (target, mode, to_rtx);
3878 }
3879 else if (offset == 0)
3880 to_rtx = target;
3881 else
3882 abort ();
3883 emit_move_insn (to_rtx, datum);
3884 }
3885 if (ibit == nbits)
3886 break;
3887 word = 0;
3888 bit_pos = 0;
3889 offset += set_word_size / BITS_PER_UNIT;
3890 }
3891 }
3892 }
3893 else if (!cleared)
3894 {
3895 /* Don't bother clearing storage if the set is all ones. */
3896 if (TREE_CHAIN (elt) != NULL_TREE
3897 || (TREE_PURPOSE (elt) == NULL_TREE
3898 ? nbits != 1
3899 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3900 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3901 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3902 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3903 != nbits))))
3904 clear_storage (target, expr_size (exp),
3905 TYPE_ALIGN (type) / BITS_PER_UNIT);
3906 }
3907
3908 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3909 {
3910 /* start of range of element or NULL */
3911 tree startbit = TREE_PURPOSE (elt);
3912 /* end of range of element, or element value */
3913 tree endbit = TREE_VALUE (elt);
3914 HOST_WIDE_INT startb, endb;
3915 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3916
3917 bitlength_rtx = expand_expr (bitlength,
3918 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3919
3920 /* handle non-range tuple element like [ expr ] */
3921 if (startbit == NULL_TREE)
3922 {
3923 startbit = save_expr (endbit);
3924 endbit = startbit;
3925 }
3926 startbit = convert (sizetype, startbit);
3927 endbit = convert (sizetype, endbit);
3928 if (! integer_zerop (domain_min))
3929 {
3930 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3931 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3932 }
3933 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3934 EXPAND_CONST_ADDRESS);
3935 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3936 EXPAND_CONST_ADDRESS);
3937
3938 if (REG_P (target))
3939 {
3940 targetx = assign_stack_temp (GET_MODE (target),
3941 GET_MODE_SIZE (GET_MODE (target)),
3942 0);
3943 emit_move_insn (targetx, target);
3944 }
3945 else if (GET_CODE (target) == MEM)
3946 targetx = target;
3947 else
3948 abort ();
3949
3950 #ifdef TARGET_MEM_FUNCTIONS
3951 /* Optimization: If startbit and endbit + 1 are both
3952 constants divisible by BITS_PER_UNIT, the range covers
3953 whole bytes, so call memset instead. */
3954 if (TREE_CODE (startbit) == INTEGER_CST
3955 && TREE_CODE (endbit) == INTEGER_CST
3956 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3957 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3958 {
3959 emit_library_call (memset_libfunc, 0,
3960 VOIDmode, 3,
3961 plus_constant (XEXP (targetx, 0),
3962 startb / BITS_PER_UNIT),
3963 Pmode,
3964 constm1_rtx, TYPE_MODE (integer_type_node),
3965 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3966 TYPE_MODE (sizetype));
3967 }
3968 else
3969 #endif
3970 {
3971 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3972 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3973 bitlength_rtx, TYPE_MODE (sizetype),
3974 startbit_rtx, TYPE_MODE (sizetype),
3975 endbit_rtx, TYPE_MODE (sizetype));
3976 }
3977 if (REG_P (target))
3978 emit_move_insn (target, targetx);
3979 }
3980 }
3981
3982 else
3983 abort ();
3984 }
3985
3986 /* Store the value of EXP (an expression tree)
3987 into a subfield of TARGET which has mode MODE and occupies
3988 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3989 If MODE is VOIDmode, it means that we are storing into a bit-field.
3990
3991 If VALUE_MODE is VOIDmode, return nothing in particular.
3992 UNSIGNEDP is not used in this case.
3993
3994 Otherwise, return an rtx for the value stored. This rtx
3995 has mode VALUE_MODE if that is convenient to do.
3996 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3997
3998 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3999 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4000
4001 static rtx
4002 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4003 unsignedp, align, total_size)
4004 rtx target;
4005 int bitsize, bitpos;
4006 enum machine_mode mode;
4007 tree exp;
4008 enum machine_mode value_mode;
4009 int unsignedp;
4010 int align;
4011 int total_size;
4012 {
4013 HOST_WIDE_INT width_mask = 0;
4014
4015 if (bitsize < HOST_BITS_PER_WIDE_INT)
4016 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4017
4018 /* If we are storing into an unaligned field of an aligned union that is
4019 in a register, we may have the mode of TARGET being an integer mode but
4020 MODE == BLKmode. In that case, get an aligned object whose size and
4021 alignment are the same as TARGET and store TARGET into it (we can avoid
4022 the store if the field being stored is the entire width of TARGET). Then
4023 call ourselves recursively to store the field into a BLKmode version of
4024 that object. Finally, load from the object into TARGET. This is not
4025 very efficient in general, but should only be slightly more expensive
4026 than the otherwise-required unaligned accesses. Perhaps this can be
4027 cleaned up later. */
4028
4029 if (mode == BLKmode
4030 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4031 {
4032 rtx object = assign_stack_temp (GET_MODE (target),
4033 GET_MODE_SIZE (GET_MODE (target)), 0);
4034 rtx blk_object = copy_rtx (object);
4035
4036 MEM_IN_STRUCT_P (object) = 1;
4037 MEM_IN_STRUCT_P (blk_object) = 1;
4038 PUT_MODE (blk_object, BLKmode);
4039
4040 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4041 emit_move_insn (object, target);
4042
4043 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4044 align, total_size);
4045
4046 /* Even though we aren't returning target, we need to
4047 give it the updated value. */
4048 emit_move_insn (target, object);
4049
4050 return blk_object;
4051 }
4052
4053 /* If the structure is in a register or if the component
4054 is a bit field, we cannot use addressing to access it.
4055 Use bit-field techniques or SUBREG to store in it. */
4056
4057 if (mode == VOIDmode
4058 || (mode != BLKmode && ! direct_store[(int) mode])
4059 || GET_CODE (target) == REG
4060 || GET_CODE (target) == SUBREG
4061 /* If the field isn't aligned enough to store as an ordinary memref,
4062 store it as a bit field. */
4063 || (SLOW_UNALIGNED_ACCESS
4064 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4065 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4066 {
4067 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4068
4069 /* If BITSIZE is narrower than the size of the type of EXP
4070 we will be narrowing TEMP. Normally, what's wanted are the
4071 low-order bits. However, if EXP's type is a record and this is
4072 a big-endian machine, we want the upper BITSIZE bits. */
4073 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4074 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4075 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4076 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4077 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4078 - bitsize),
4079 temp, 1);
4080
4081 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4082 MODE. */
4083 if (mode != VOIDmode && mode != BLKmode
4084 && mode != TYPE_MODE (TREE_TYPE (exp)))
4085 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4086
4087 /* If the modes of TARGET and TEMP are both BLKmode, both
4088 must be in memory and BITPOS must be aligned on a byte
4089 boundary. If so, we simply do a block copy. */
4090 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4091 {
4092 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4093 || bitpos % BITS_PER_UNIT != 0)
4094 abort ();
4095
4096 target = change_address (target, VOIDmode,
4097 plus_constant (XEXP (target, 0),
4098 bitpos / BITS_PER_UNIT));
4099
4100 emit_block_move (target, temp,
4101 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4102 / BITS_PER_UNIT),
4103 1);
4104
4105 return value_mode == VOIDmode ? const0_rtx : target;
4106 }
4107
4108 /* Store the value in the bitfield. */
4109 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4110 if (value_mode != VOIDmode)
4111 {
4112 /* The caller wants an rtx for the value. */
4113 /* If possible, avoid refetching from the bitfield itself. */
4114 if (width_mask != 0
4115 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4116 {
4117 tree count;
4118 enum machine_mode tmode;
4119
4120 if (unsignedp)
4121 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4122 tmode = GET_MODE (temp);
4123 if (tmode == VOIDmode)
4124 tmode = value_mode;
4125 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4126 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4127 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4128 }
4129 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4130 NULL_RTX, value_mode, 0, align,
4131 total_size);
4132 }
4133 return const0_rtx;
4134 }
4135 else
4136 {
4137 rtx addr = XEXP (target, 0);
4138 rtx to_rtx;
4139
4140 /* If a value is wanted, it must be the lhs;
4141 so make the address stable for multiple use. */
4142
4143 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4144 && ! CONSTANT_ADDRESS_P (addr)
4145 /* A frame-pointer reference is already stable. */
4146 && ! (GET_CODE (addr) == PLUS
4147 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4148 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4149 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4150 addr = copy_to_reg (addr);
4151
4152 /* Now build a reference to just the desired component. */
4153
4154 to_rtx = copy_rtx (change_address (target, mode,
4155 plus_constant (addr,
4156 (bitpos
4157 / BITS_PER_UNIT))));
4158 MEM_IN_STRUCT_P (to_rtx) = 1;
4159
4160 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4161 }
4162 }
4163 \f
4164 /* Return true if any object containing the innermost array is an unaligned
4165 packed structure field. */
4166
4167 static int
4168 get_inner_unaligned_p (exp)
4169 tree exp;
4170 {
4171 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4172
4173 while (1)
4174 {
4175 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4176 {
4177 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4178 < needed_alignment)
4179 return 1;
4180 }
4181 else if (TREE_CODE (exp) != ARRAY_REF
4182 && TREE_CODE (exp) != NON_LVALUE_EXPR
4183 && ! ((TREE_CODE (exp) == NOP_EXPR
4184 || TREE_CODE (exp) == CONVERT_EXPR)
4185 && (TYPE_MODE (TREE_TYPE (exp))
4186 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4187 break;
4188
4189 exp = TREE_OPERAND (exp, 0);
4190 }
4191
4192 return 0;
4193 }
4194
4195 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4196 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4197 ARRAY_REFs and find the ultimate containing object, which we return.
4198
4199 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4200 bit position, and *PUNSIGNEDP to the signedness of the field.
4201 If the position of the field is variable, we store a tree
4202 giving the variable offset (in units) in *POFFSET.
4203 This offset is in addition to the bit position.
4204 If the position is not variable, we store 0 in *POFFSET.
4205 We set *PALIGNMENT to the alignment in bytes of the address that will be
4206 computed. This is the alignment of the thing we return if *POFFSET
4207 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4208
4209 If any of the extraction expressions is volatile,
4210 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4211
4212 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4213 is a mode that can be used to access the field. In that case, *PBITSIZE
4214 is redundant.
4215
4216 If the field describes a variable-sized object, *PMODE is set to
4217 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4218 this case, but the address of the object can be found. */
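/* For example, given a reference like A.B[I].C, this walks through
   the COMPONENT_REF and ARRAY_REF nodes, accumulating the constant
   part of each step in *PBITPOS and the variable part (here a
   multiple of I) in *POFFSET, and returns the innermost object A.  */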
4219
4220 tree
4221 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4222 punsignedp, pvolatilep, palignment)
4223 tree exp;
4224 int *pbitsize;
4225 int *pbitpos;
4226 tree *poffset;
4227 enum machine_mode *pmode;
4228 int *punsignedp;
4229 int *pvolatilep;
4230 int *palignment;
4231 {
4232 tree orig_exp = exp;
4233 tree size_tree = 0;
4234 enum machine_mode mode = VOIDmode;
4235 tree offset = integer_zero_node;
4236 int alignment = BIGGEST_ALIGNMENT;
4237
4238 if (TREE_CODE (exp) == COMPONENT_REF)
4239 {
4240 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4241 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4242 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4243 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4244 }
4245 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4246 {
4247 size_tree = TREE_OPERAND (exp, 1);
4248 *punsignedp = TREE_UNSIGNED (exp);
4249 }
4250 else
4251 {
4252 mode = TYPE_MODE (TREE_TYPE (exp));
4253 *pbitsize = GET_MODE_BITSIZE (mode);
4254 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4255 }
4256
4257 if (size_tree)
4258 {
4259 if (TREE_CODE (size_tree) != INTEGER_CST)
4260 mode = BLKmode, *pbitsize = -1;
4261 else
4262 *pbitsize = TREE_INT_CST_LOW (size_tree);
4263 }
4264
4265 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4266 and find the ultimate containing object. */
4267
4268 *pbitpos = 0;
4269
4270 while (1)
4271 {
4272 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4273 {
4274 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4275 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4276 : TREE_OPERAND (exp, 2));
4277 tree constant = integer_zero_node, var = pos;
4278
4279 /* If this field hasn't been filled in yet, don't go
4280 past it. This should only happen when folding expressions
4281 made during type construction. */
4282 if (pos == 0)
4283 break;
4284
4285 /* Assume here that the offset is a multiple of a unit.
4286 If not, there should be an explicitly added constant. */
4287 if (TREE_CODE (pos) == PLUS_EXPR
4288 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4289 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4290 else if (TREE_CODE (pos) == INTEGER_CST)
4291 constant = pos, var = integer_zero_node;
4292
4293 *pbitpos += TREE_INT_CST_LOW (constant);
4294 offset = size_binop (PLUS_EXPR, offset,
4295 size_binop (EXACT_DIV_EXPR, var,
4296 size_int (BITS_PER_UNIT)));
4297 }
4298
4299 else if (TREE_CODE (exp) == ARRAY_REF)
4300 {
4301 /* This code is based on the code in case ARRAY_REF in expand_expr
4302 below. We assume here that the size of an array element is
4303 always an integral multiple of BITS_PER_UNIT. */
4304
4305 tree index = TREE_OPERAND (exp, 1);
4306 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4307 tree low_bound
4308 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4309 tree index_type = TREE_TYPE (index);
4310
4311 if (! integer_zerop (low_bound))
4312 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4313
4314 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4315 {
4316 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4317 index);
4318 index_type = TREE_TYPE (index);
4319 }
4320
4321 index = fold (build (MULT_EXPR, index_type, index,
4322 convert (index_type,
4323 TYPE_SIZE (TREE_TYPE (exp)))));
4324
4325 if (TREE_CODE (index) == INTEGER_CST
4326 && TREE_INT_CST_HIGH (index) == 0)
4327 *pbitpos += TREE_INT_CST_LOW (index);
4328 else
4329 offset = size_binop (PLUS_EXPR, offset,
4330 size_binop (FLOOR_DIV_EXPR, index,
4331 size_int (BITS_PER_UNIT)));
4332 }
4333 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4334 && ! ((TREE_CODE (exp) == NOP_EXPR
4335 || TREE_CODE (exp) == CONVERT_EXPR)
4336 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4337 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4338 != UNION_TYPE))
4339 && (TYPE_MODE (TREE_TYPE (exp))
4340 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4341 break;
4342
4343 /* If any reference in the chain is volatile, the effect is volatile. */
4344 if (TREE_THIS_VOLATILE (exp))
4345 *pvolatilep = 1;
4346
4347 /* If the offset is non-constant already, then we can't assume any
4348 alignment more than the alignment here. */
4349 if (! integer_zerop (offset))
4350 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4351
4352 exp = TREE_OPERAND (exp, 0);
4353 }
4354
4355 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4356 alignment = MIN (alignment, DECL_ALIGN (exp));
4357 else if (TREE_TYPE (exp) != 0)
4358 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4359
4360 if (integer_zerop (offset))
4361 offset = 0;
4362
4363 if (offset != 0 && contains_placeholder_p (offset))
4364 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4365
4366 *pmode = mode;
4367 *poffset = offset;
4368 *palignment = alignment / BITS_PER_UNIT;
4369 return exp;
4370 }
4371 \f
4372 /* Given an rtx VALUE that may contain additions and multiplications,
4373 return an equivalent value that just refers to a register or memory.
4374 This is done by generating instructions to perform the arithmetic
4375 and returning a pseudo-register containing the value.
4376
4377 The returned value may be a REG, SUBREG, MEM or constant. */
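/* E.g. given (plus (reg 100) (mult (reg 101) (const_int 4))), we
   emit the multiply and the add and return a pseudo register
   holding the sum.  */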
4378
4379 rtx
4380 force_operand (value, target)
4381 rtx value, target;
4382 {
4383 register optab binoptab = 0;
4384 /* Use a temporary to force order of execution of calls to
4385 `force_operand'. */
4386 rtx tmp;
4387 register rtx op2;
4388 /* Use subtarget as the target for operand 0 of a binary operation. */
4389 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4390
4391 if (GET_CODE (value) == PLUS)
4392 binoptab = add_optab;
4393 else if (GET_CODE (value) == MINUS)
4394 binoptab = sub_optab;
4395 else if (GET_CODE (value) == MULT)
4396 {
4397 op2 = XEXP (value, 1);
4398 if (!CONSTANT_P (op2)
4399 && !(GET_CODE (op2) == REG && op2 != subtarget))
4400 subtarget = 0;
4401 tmp = force_operand (XEXP (value, 0), subtarget);
4402 return expand_mult (GET_MODE (value), tmp,
4403 force_operand (op2, NULL_RTX),
4404 target, 0);
4405 }
4406
4407 if (binoptab)
4408 {
4409 op2 = XEXP (value, 1);
4410 if (!CONSTANT_P (op2)
4411 && !(GET_CODE (op2) == REG && op2 != subtarget))
4412 subtarget = 0;
4413 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4414 {
4415 binoptab = add_optab;
4416 op2 = negate_rtx (GET_MODE (value), op2);
4417 }
4418
4419 /* Check for an addition with OP2 a constant integer and our first
4420 operand a PLUS of a virtual register and something else. In that
4421 case, we want to emit the sum of the virtual register and the
4422 constant first and then add the other value. This allows virtual
4423 register instantiation to simply modify the constant rather than
4424 creating another one around this addition. */
4425 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4426 && GET_CODE (XEXP (value, 0)) == PLUS
4427 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4428 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4429 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4430 {
4431 rtx temp = expand_binop (GET_MODE (value), binoptab,
4432 XEXP (XEXP (value, 0), 0), op2,
4433 subtarget, 0, OPTAB_LIB_WIDEN);
4434 return expand_binop (GET_MODE (value), binoptab, temp,
4435 force_operand (XEXP (XEXP (value, 0), 1), 0),
4436 target, 0, OPTAB_LIB_WIDEN);
4437 }
4438
4439 tmp = force_operand (XEXP (value, 0), subtarget);
4440 return expand_binop (GET_MODE (value), binoptab, tmp,
4441 force_operand (op2, NULL_RTX),
4442 target, 0, OPTAB_LIB_WIDEN);
4443 /* We give UNSIGNEDP = 0 to expand_binop
4444 because the only operations we are expanding here are signed ones. */
4445 }
4446 return value;
4447 }
4448 \f
4449 /* Subroutine of expand_expr:
4450 save the non-copied parts (LIST) of an expr (LHS), and return a list
4451 which can restore these values to their previous values,
4452 should something modify their storage. */
4453
4454 static tree
4455 save_noncopied_parts (lhs, list)
4456 tree lhs;
4457 tree list;
4458 {
4459 tree tail;
4460 tree parts = 0;
4461
4462 for (tail = list; tail; tail = TREE_CHAIN (tail))
4463 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4464 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4465 else
4466 {
4467 tree part = TREE_VALUE (tail);
4468 tree part_type = TREE_TYPE (part);
4469 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4470 rtx target = assign_temp (part_type, 0, 1, 1);
4471 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4472 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4473 parts = tree_cons (to_be_saved,
4474 build (RTL_EXPR, part_type, NULL_TREE,
4475 (tree) target),
4476 parts);
4477 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4478 }
4479 return parts;
4480 }
4481
4482 /* Subroutine of expand_expr:
4483 record the non-copied parts (LIST) of an expr (LHS), and return a list
4484 which specifies the initial values of these parts. */
4485
4486 static tree
4487 init_noncopied_parts (lhs, list)
4488 tree lhs;
4489 tree list;
4490 {
4491 tree tail;
4492 tree parts = 0;
4493
4494 for (tail = list; tail; tail = TREE_CHAIN (tail))
4495 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4496 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4497 else
4498 {
4499 tree part = TREE_VALUE (tail);
4500 tree part_type = TREE_TYPE (part);
4501 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4502 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4503 }
4504 return parts;
4505 }
4506
4507 /* Subroutine of expand_expr: return nonzero iff there is no way that
4508 EXP can reference X, which is being modified. */
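/* E.g. if X is a pseudo register being stored into and EXP mentions
   a variable whose DECL_RTL is that same pseudo, they conflict and
   we return 0.  */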
4509
4510 static int
4511 safe_from_p (x, exp)
4512 rtx x;
4513 tree exp;
4514 {
4515 rtx exp_rtl = 0;
4516 int i, nops;
4517
4518 if (x == 0
4519 /* If EXP has varying size, we MUST use a target since we currently
4520 have no way of allocating temporaries of variable size
4521 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4522 So we assume here that something at a higher level has prevented a
4523 clash. This is somewhat bogus, but the best we can do. Only
4524 do this when X is BLKmode. */
4525 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4526 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4527 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4528 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4529 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4530 != INTEGER_CST)
4531 && GET_MODE (x) == BLKmode))
4532 return 1;
4533
4534 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4535 find the underlying pseudo. */
4536 if (GET_CODE (x) == SUBREG)
4537 {
4538 x = SUBREG_REG (x);
4539 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4540 return 0;
4541 }
4542
4543 /* If X is a location in the outgoing argument area, it is always safe. */
4544 if (GET_CODE (x) == MEM
4545 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4546 || (GET_CODE (XEXP (x, 0)) == PLUS
4547 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4548 return 1;
4549
4550 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4551 {
4552 case 'd':
4553 exp_rtl = DECL_RTL (exp);
4554 break;
4555
4556 case 'c':
4557 return 1;
4558
4559 case 'x':
4560 if (TREE_CODE (exp) == TREE_LIST)
4561 return ((TREE_VALUE (exp) == 0
4562 || safe_from_p (x, TREE_VALUE (exp)))
4563 && (TREE_CHAIN (exp) == 0
4564 || safe_from_p (x, TREE_CHAIN (exp))));
4565 else
4566 return 0;
4567
4568 case '1':
4569 return safe_from_p (x, TREE_OPERAND (exp, 0));
4570
4571 case '2':
4572 case '<':
4573 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4574 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4575
4576 case 'e':
4577 case 'r':
4578 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4579 the expression. If it is set, we conflict iff we are that rtx or
4580 both are in memory. Otherwise, we check all operands of the
4581 expression recursively. */
4582
4583 switch (TREE_CODE (exp))
4584 {
4585 case ADDR_EXPR:
4586 return (staticp (TREE_OPERAND (exp, 0))
4587 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4588
4589 case INDIRECT_REF:
4590 if (GET_CODE (x) == MEM)
4591 return 0;
4592 break;
4593
4594 case CALL_EXPR:
4595 exp_rtl = CALL_EXPR_RTL (exp);
4596 if (exp_rtl == 0)
4597 {
4598 /* Assume that the call will clobber all hard registers and
4599 all of memory. */
4600 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4601 || GET_CODE (x) == MEM)
4602 return 0;
4603 }
4604
4605 break;
4606
4607 case RTL_EXPR:
4608 /* If a sequence exists, we would have to scan every instruction
4609 in the sequence to see if it was safe. This is probably not
4610 worthwhile. */
4611 if (RTL_EXPR_SEQUENCE (exp))
4612 return 0;
4613
4614 exp_rtl = RTL_EXPR_RTL (exp);
4615 break;
4616
4617 case WITH_CLEANUP_EXPR:
4618 exp_rtl = RTL_EXPR_RTL (exp);
4619 break;
4620
4621 case CLEANUP_POINT_EXPR:
4622 return safe_from_p (x, TREE_OPERAND (exp, 0));
4623
4624 case SAVE_EXPR:
4625 exp_rtl = SAVE_EXPR_RTL (exp);
4626 break;
4627
4628 case BIND_EXPR:
4629 /* The only operand we look at is operand 1. The rest aren't
4630 part of the expression. */
4631 return safe_from_p (x, TREE_OPERAND (exp, 1));
4632
4633 case METHOD_CALL_EXPR:
4634 /* This takes an rtx argument, but shouldn't appear here. */
4635 abort ();
4636 }
4637
4638 /* If we have an rtx, we do not need to scan our operands. */
4639 if (exp_rtl)
4640 break;
4641
4642 nops = tree_code_length[(int) TREE_CODE (exp)];
4643 for (i = 0; i < nops; i++)
4644 if (TREE_OPERAND (exp, i) != 0
4645 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4646 return 0;
4647 }
4648
4649 /* If we have an rtl, find any enclosed object. Then see if we conflict
4650 with it. */
4651 if (exp_rtl)
4652 {
4653 if (GET_CODE (exp_rtl) == SUBREG)
4654 {
4655 exp_rtl = SUBREG_REG (exp_rtl);
4656 if (GET_CODE (exp_rtl) == REG
4657 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4658 return 0;
4659 }
4660
4661 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4662 are memory and EXP is not readonly. */
4663 return ! (rtx_equal_p (x, exp_rtl)
4664 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4665 && ! TREE_READONLY (exp)));
4666 }
4667
4668 /* If we reach here, it is safe. */
4669 return 1;
4670 }
4671
4672 /* Subroutine of expand_expr: return nonzero iff EXP is an
4673 expression whose type is statically determinable. */
4674
4675 static int
4676 fixed_type_p (exp)
4677 tree exp;
4678 {
4679 if (TREE_CODE (exp) == PARM_DECL
4680 || TREE_CODE (exp) == VAR_DECL
4681 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4682 || TREE_CODE (exp) == COMPONENT_REF
4683 || TREE_CODE (exp) == ARRAY_REF)
4684 return 1;
4685 return 0;
4686 }
4687
4688 /* Subroutine of expand_expr: return rtx if EXP is a
4689 variable or parameter; else return 0. */
4690
4691 static rtx
4692 var_rtx (exp)
4693 tree exp;
4694 {
4695 STRIP_NOPS (exp);
4696 switch (TREE_CODE (exp))
4697 {
4698 case PARM_DECL:
4699 case VAR_DECL:
4700 return DECL_RTL (exp);
4701 default:
4702 return 0;
4703 }
4704 }
4705 \f
4706 /* expand_expr: generate code for computing expression EXP.
4707 An rtx for the computed value is returned. The value is never null.
4708 In the case of a void EXP, const0_rtx is returned.
4709
4710 The value may be stored in TARGET if TARGET is nonzero.
4711 TARGET is just a suggestion; callers must assume that
4712 the rtx returned may not be the same as TARGET.
4713
4714 If TARGET is CONST0_RTX, it means that the value will be ignored.
4715
4716 If TMODE is not VOIDmode, it suggests generating the
4717 result in mode TMODE. But this is done only when convenient.
4718 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4719 TMODE is just a suggestion; callers must assume that
4720 the rtx returned may not have mode TMODE.
4721
4722 Note that TARGET may have neither TMODE nor MODE. In that case, it
4723 probably will not be used.
4724
4725 If MODIFIER is EXPAND_SUM then when EXP is an addition
4726 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4727 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4728 products as above, or REG or MEM, or constant.
4729 Ordinarily in such cases we would output mul or add instructions
4730 and then return a pseudo reg containing the sum.
4731
4732 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4733 it also marks a label as absolutely required (it can't be dead).
4734 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4735 This is used for outputting expressions used in initializers.
4736
4737 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4738 with a constant address even if that address is not normally legitimate.
4739 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
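/* E.g. with EXPAND_SUM, expanding the address arithmetic for an
   array reference may return (plus (reg) (mult (reg) (const_int 4)))
   rather than forcing the sum into a single pseudo register.  */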
4740
4741 rtx
4742 expand_expr (exp, target, tmode, modifier)
4743 register tree exp;
4744 rtx target;
4745 enum machine_mode tmode;
4746 enum expand_modifier modifier;
4747 {
4748 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4749 This is static so it will be accessible to our recursive callees. */
4750 static tree placeholder_list = 0;
4751 register rtx op0, op1, temp;
4752 tree type = TREE_TYPE (exp);
4753 int unsignedp = TREE_UNSIGNED (type);
4754 register enum machine_mode mode = TYPE_MODE (type);
4755 register enum tree_code code = TREE_CODE (exp);
4756 optab this_optab;
4757 /* Use subtarget as the target for operand 0 of a binary operation. */
4758 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4759 rtx original_target = target;
4760 /* Maybe defer this until sure not doing bytecode? */
4761 int ignore = (target == const0_rtx
4762 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4763 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4764 || code == COND_EXPR)
4765 && TREE_CODE (type) == VOID_TYPE));
4766 tree context;
4767
4768
4769 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4770 {
4771 bc_expand_expr (exp);
4772 return NULL;
4773 }
4774
4775 /* Don't use hard regs as subtargets, because the combiner
4776 can only handle pseudo regs. */
4777 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4778 subtarget = 0;
4779 /* Avoid subtargets inside loops,
4780 since they hide some invariant expressions. */
4781 if (preserve_subexpressions_p ())
4782 subtarget = 0;
4783
4784 /* If we are going to ignore this result, we need only do something
4785 if there is a side-effect somewhere in the expression. If there
4786 is, short-circuit the most common cases here. Note that we must
4787 not call expand_expr with anything but const0_rtx in case this
4788 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4789
4790 if (ignore)
4791 {
4792 if (! TREE_SIDE_EFFECTS (exp))
4793 return const0_rtx;
4794
4795 /* Ensure we reference a volatile object even if value is ignored. */
4796 if (TREE_THIS_VOLATILE (exp)
4797 && TREE_CODE (exp) != FUNCTION_DECL
4798 && mode != VOIDmode && mode != BLKmode)
4799 {
4800 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4801 if (GET_CODE (temp) == MEM)
4802 temp = copy_to_reg (temp);
4803 return const0_rtx;
4804 }
4805
4806 if (TREE_CODE_CLASS (code) == '1')
4807 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4808 VOIDmode, modifier);
4809 else if (TREE_CODE_CLASS (code) == '2'
4810 || TREE_CODE_CLASS (code) == '<')
4811 {
4812 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4813 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4814 return const0_rtx;
4815 }
4816 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4817 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4818 /* If the second operand has no side effects, just evaluate
4819 the first. */
4820 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4821 VOIDmode, modifier);
4822
4823 target = 0;
4824 }
4825
4826 /* If we will do cse, generate all results into pseudo registers
4827 since 1) that allows cse to find more things
4828 and 2) otherwise cse could produce an insn the machine
4829 cannot support. */
4830
4831 if (! cse_not_expected && mode != BLKmode && target
4832 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4833 target = subtarget;
4834
4835 switch (code)
4836 {
4837 case LABEL_DECL:
4838 {
4839 tree function = decl_function_context (exp);
4840 /* Handle using a label in a containing function. */
4841 if (function != current_function_decl && function != 0)
4842 {
4843 struct function *p = find_function_data (function);
4844 /* Allocate in the memory associated with the function
4845 that the label is in. */
4846 push_obstacks (p->function_obstack,
4847 p->function_maybepermanent_obstack);
4848
4849 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4850 label_rtx (exp), p->forced_labels);
4851 pop_obstacks ();
4852 }
4853 else if (modifier == EXPAND_INITIALIZER)
4854 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4855 label_rtx (exp), forced_labels);
4856 temp = gen_rtx (MEM, FUNCTION_MODE,
4857 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4858 if (function != current_function_decl && function != 0)
4859 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4860 return temp;
4861 }
4862
4863 case PARM_DECL:
4864 if (DECL_RTL (exp) == 0)
4865 {
4866 error_with_decl (exp, "prior parameter's size depends on `%s'");
4867 return CONST0_RTX (mode);
4868 }
4869
4870 /* ... fall through ... */
4871
4872 case VAR_DECL:
4873 /* If a static var's type was incomplete when the decl was written,
4874 but the type is complete now, lay out the decl now. */
4875 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4876 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4877 {
4878 push_obstacks_nochange ();
4879 end_temporary_allocation ();
4880 layout_decl (exp, 0);
4881 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4882 pop_obstacks ();
4883 }
4884
4885 /* ... fall through ... */
4886
4887 case FUNCTION_DECL:
4888 case RESULT_DECL:
4889 if (DECL_RTL (exp) == 0)
4890 abort ();
4891
4892 /* Ensure the variable is marked as used even if it doesn't go
4893 through a parser. If it hasn't been used yet, write out an external
4894 definition. */
4895 if (! TREE_USED (exp))
4896 {
4897 assemble_external (exp);
4898 TREE_USED (exp) = 1;
4899 }
4900
4901 /* Show we haven't gotten RTL for this yet. */
4902 temp = 0;
4903
4904 /* Handle variables inherited from containing functions. */
4905 context = decl_function_context (exp);
4906
4907 /* We treat inline_function_decl as an alias for the current function
4908 because that is the inline function whose vars, types, etc.
4909 are being merged into the current function.
4910 See expand_inline_function. */
4911
4912 if (context != 0 && context != current_function_decl
4913 && context != inline_function_decl
4914 /* If var is static, we don't need a static chain to access it. */
4915 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4916 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4917 {
4918 rtx addr;
4919
4920 /* Mark as non-local and addressable. */
4921 DECL_NONLOCAL (exp) = 1;
4922 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4923 abort ();
4924 mark_addressable (exp);
4925 if (GET_CODE (DECL_RTL (exp)) != MEM)
4926 abort ();
4927 addr = XEXP (DECL_RTL (exp), 0);
4928 if (GET_CODE (addr) == MEM)
4929 addr = gen_rtx (MEM, Pmode,
4930 fix_lexical_addr (XEXP (addr, 0), exp));
4931 else
4932 addr = fix_lexical_addr (addr, exp);
4933 temp = change_address (DECL_RTL (exp), mode, addr);
4934 }
4935
4936 /* This is the case of an array whose size is to be determined
4937 from its initializer, while the initializer is still being parsed.
4938 See expand_decl. */
4939
4940 else if (GET_CODE (DECL_RTL (exp)) == MEM
4941 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4942 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4943 XEXP (DECL_RTL (exp), 0));
4944
4945 /* If DECL_RTL is memory, we are in the normal case and either
4946 the address is not valid or it is not a register and -fforce-addr
4947 is specified, get the address into a register. */
4948
4949 else if (GET_CODE (DECL_RTL (exp)) == MEM
4950 && modifier != EXPAND_CONST_ADDRESS
4951 && modifier != EXPAND_SUM
4952 && modifier != EXPAND_INITIALIZER
4953 && (! memory_address_p (DECL_MODE (exp),
4954 XEXP (DECL_RTL (exp), 0))
4955 || (flag_force_addr
4956 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4957 temp = change_address (DECL_RTL (exp), VOIDmode,
4958 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4959
4960 /* If we got something, return it. But first, set the alignment
4961 if the address is a register. */
4962 if (temp != 0)
4963 {
4964 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4965 mark_reg_pointer (XEXP (temp, 0),
4966 DECL_ALIGN (exp) / BITS_PER_UNIT);
4967
4968 return temp;
4969 }
4970
4971 /* If the mode of DECL_RTL does not match that of the decl, it
4972 must be a promoted value. We return a SUBREG of the wanted mode,
4973 but mark it so that we know that it was already extended. */
4974
4975 if (GET_CODE (DECL_RTL (exp)) == REG
4976 && GET_MODE (DECL_RTL (exp)) != mode)
4977 {
4978 /* Get the signedness used for this variable. Ensure we get the
4979 same mode we got when the variable was declared. */
4980 if (GET_MODE (DECL_RTL (exp))
4981 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4982 abort ();
4983
4984 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4985 SUBREG_PROMOTED_VAR_P (temp) = 1;
4986 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4987 return temp;
4988 }
4989
4990 return DECL_RTL (exp);
4991
4992 case INTEGER_CST:
4993 return immed_double_const (TREE_INT_CST_LOW (exp),
4994 TREE_INT_CST_HIGH (exp),
4995 mode);
4996
4997 case CONST_DECL:
4998 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4999
5000 case REAL_CST:
5001 /* If optimized, generate immediate CONST_DOUBLE
5002 which will be turned into memory by reload if necessary.
5003
5004 We used to force a register so that loop.c could see it. But
5005 this does not allow gen_* patterns to perform optimizations with
5006 the constants. It also produces two insns in cases like "x = 1.0;".
5007 On most machines, floating-point constants are not permitted in
5008 many insns, so we'd end up copying it to a register in any case.
5009
5010 Now, we do the copying in expand_binop, if appropriate. */
5011 return immed_real_const (exp);
5012
5013 case COMPLEX_CST:
5014 case STRING_CST:
5015 if (! TREE_CST_RTL (exp))
5016 output_constant_def (exp);
5017
5018 /* TREE_CST_RTL probably contains a constant address.
5019 On RISC machines where a constant address isn't valid,
5020 make some insns to get that address into a register. */
5021 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5022 && modifier != EXPAND_CONST_ADDRESS
5023 && modifier != EXPAND_INITIALIZER
5024 && modifier != EXPAND_SUM
5025 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5026 || (flag_force_addr
5027 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5028 return change_address (TREE_CST_RTL (exp), VOIDmode,
5029 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5030 return TREE_CST_RTL (exp);
5031
5032 case SAVE_EXPR:
5033 context = decl_function_context (exp);
5034
5035 /* We treat inline_function_decl as an alias for the current function
5036 because that is the inline function whose vars, types, etc.
5037 are being merged into the current function.
5038 See expand_inline_function. */
5039 if (context == current_function_decl || context == inline_function_decl)
5040 context = 0;
5041
5042 /* If this is non-local, handle it. */
5043 if (context)
5044 {
5045 temp = SAVE_EXPR_RTL (exp);
5046 if (temp && GET_CODE (temp) == REG)
5047 {
5048 put_var_into_stack (exp);
5049 temp = SAVE_EXPR_RTL (exp);
5050 }
5051 if (temp == 0 || GET_CODE (temp) != MEM)
5052 abort ();
5053 return change_address (temp, mode,
5054 fix_lexical_addr (XEXP (temp, 0), exp));
5055 }
5056 if (SAVE_EXPR_RTL (exp) == 0)
5057 {
5058 if (mode == VOIDmode)
5059 temp = const0_rtx;
5060 else
5061 temp = assign_temp (type, 0, 0, 0);
5062
5063 SAVE_EXPR_RTL (exp) = temp;
5064 if (!optimize && GET_CODE (temp) == REG)
5065 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5066 save_expr_regs);
5067
5068 /* If the mode of TEMP does not match that of the expression, it
5069 must be a promoted value. We pass store_expr a SUBREG of the
5070 wanted mode but mark it so that we know that it was already
5071 extended. Note that `unsignedp' was modified above in
5072 this case. */
5073
5074 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5075 {
5076 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5077 SUBREG_PROMOTED_VAR_P (temp) = 1;
5078 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5079 }
5080
5081 if (temp == const0_rtx)
5082 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5083 else
5084 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5085 }
5086
5087 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5088 must be a promoted value. We return a SUBREG of the wanted mode,
5089 but mark it so that we know that it was already extended. */
5090
5091 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5092 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5093 {
5094 /* Compute the signedness and make the proper SUBREG. */
5095 promote_mode (type, mode, &unsignedp, 0);
5096 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5097 SUBREG_PROMOTED_VAR_P (temp) = 1;
5098 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5099 return temp;
5100 }
5101
5102 return SAVE_EXPR_RTL (exp);
5103
5104 case UNSAVE_EXPR:
5105 {
5106 rtx temp;
5107 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5108 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5109 return temp;
5110 }
5111
5112 case PLACEHOLDER_EXPR:
5113 /* If there is an object at the head of the placeholder list,
5114 see if some object in its references is of type TYPE. For
5115 further information, see tree.def. */
5116 if (placeholder_list)
5117 {
5118 tree need_type = TYPE_MAIN_VARIANT (type);
5119 tree object = 0;
5120 tree old_list = placeholder_list;
5121 tree elt;
5122
5123 /* See if the object is the type that we want. Then see if
5124 the operand of any reference is the type we want. */
5125 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5126 == need_type))
5127 object = TREE_PURPOSE (placeholder_list);
5128
5129 /* Find the innermost reference that is of the type we want. */
5130 for (elt = TREE_PURPOSE (placeholder_list);
5131 elt != 0
5132 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5133 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5134 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5135 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5136 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5137 || TREE_CODE (elt) == COND_EXPR)
5138 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5139 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5140 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5141 == need_type))
5142 object = TREE_OPERAND (elt, 0);
5143
5144 if (object != 0)
5145 {
5146 /* Expand this object skipping the list entries before
5147 it was found in case it is also a PLACEHOLDER_EXPR.
5148 In that case, we want to translate it using subsequent
5149 entries. */
5150 placeholder_list = TREE_CHAIN (placeholder_list);
5151 temp = expand_expr (object, original_target, tmode, modifier);
5152 placeholder_list = old_list;
5153 return temp;
5154 }
5155 }
5156
5157 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5158 abort ();
5159
5160 case WITH_RECORD_EXPR:
5161 /* Put the object on the placeholder list, expand our first operand,
5162 and pop the list. */
5163 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5164 placeholder_list);
5165 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5166 tmode, modifier);
5167 placeholder_list = TREE_CHAIN (placeholder_list);
5168 return target;
5169
5170 case EXIT_EXPR:
5171 expand_exit_loop_if_false (NULL_PTR,
5172 invert_truthvalue (TREE_OPERAND (exp, 0)));
5173 return const0_rtx;
5174
5175 case LOOP_EXPR:
5176 push_temp_slots ();
5177 expand_start_loop (1);
5178 expand_expr_stmt (TREE_OPERAND (exp, 0));
5179 expand_end_loop ();
5180 pop_temp_slots ();
5181
5182 return const0_rtx;
5183
5184 case BIND_EXPR:
5185 {
5186 tree vars = TREE_OPERAND (exp, 0);
5187 int vars_need_expansion = 0;
5188
5189 /* Need to open a binding contour here because
5190 if there are any cleanups they must be contained here. */
5191 expand_start_bindings (0);
5192
5193 /* Mark the corresponding BLOCK for output in its proper place. */
5194 if (TREE_OPERAND (exp, 2) != 0
5195 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5196 insert_block (TREE_OPERAND (exp, 2));
5197
5198 /* If VARS have not yet been expanded, expand them now. */
5199 while (vars)
5200 {
5201 if (DECL_RTL (vars) == 0)
5202 {
5203 vars_need_expansion = 1;
5204 expand_decl (vars);
5205 }
5206 expand_decl_init (vars);
5207 vars = TREE_CHAIN (vars);
5208 }
5209
5210 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5211
5212 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5213
5214 return temp;
5215 }
5216
5217 case RTL_EXPR:
5218 if (RTL_EXPR_SEQUENCE (exp))
5219 {
5220 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5221 abort ();
5222 emit_insns (RTL_EXPR_SEQUENCE (exp));
5223 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5224 }
5225 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5226 free_temps_for_rtl_expr (exp);
5227 return RTL_EXPR_RTL (exp);
5228
5229 case CONSTRUCTOR:
5230 /* If we don't need the result, just ensure we evaluate any
5231 subexpressions. */
5232 if (ignore)
5233 {
5234 tree elt;
5235 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5236 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5237 return const0_rtx;
5238 }
5239
5240 /* All elts simple constants => refer to a constant in memory. But
5241 if this is a non-BLKmode mode, let it store a field at a time
5242 since that should make a CONST_INT or CONST_DOUBLE when we
5243 fold. Likewise, if we have a target we can use, it is best to
5244 store directly into the target unless the type is large enough
5245 that memcpy will be used. If we are making an initializer and
5246 all operands are constant, put it in memory as well. */
5247 else if ((TREE_STATIC (exp)
5248 && ((mode == BLKmode
5249 && ! (target != 0 && safe_from_p (target, exp)))
5250 || TREE_ADDRESSABLE (exp)
5251 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5252 && (move_by_pieces_ninsns
5253 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5254 TYPE_ALIGN (type) / BITS_PER_UNIT)
5255 > MOVE_RATIO)
5256 && ! mostly_zeros_p (exp))))
5257 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5258 {
5259 rtx constructor = output_constant_def (exp);
5260 if (modifier != EXPAND_CONST_ADDRESS
5261 && modifier != EXPAND_INITIALIZER
5262 && modifier != EXPAND_SUM
5263 && (! memory_address_p (GET_MODE (constructor),
5264 XEXP (constructor, 0))
5265 || (flag_force_addr
5266 && GET_CODE (XEXP (constructor, 0)) != REG)))
5267 constructor = change_address (constructor, VOIDmode,
5268 XEXP (constructor, 0));
5269 return constructor;
5270 }
5271
5272 else
5273 {
5274 /* Handle calls that pass values in multiple non-contiguous
5275 locations. The Irix 6 ABI has examples of this. */
5276 if (target == 0 || ! safe_from_p (target, exp)
5277 || GET_CODE (target) == PARALLEL)
5278 {
5279 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5280 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5281 else
5282 target = assign_temp (type, 0, 1, 1);
5283 }
5284
5285 if (TREE_READONLY (exp))
5286 {
5287 if (GET_CODE (target) == MEM)
5288 target = copy_rtx (target);
5289
5290 RTX_UNCHANGING_P (target) = 1;
5291 }
5292
5293 store_constructor (exp, target, 0);
5294 return target;
5295 }
5296
5297 case INDIRECT_REF:
5298 {
5299 tree exp1 = TREE_OPERAND (exp, 0);
5300 tree exp2;
5301
5302 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5303 op0 = memory_address (mode, op0);
5304
5305 temp = gen_rtx (MEM, mode, op0);
5306 /* If address was computed by addition,
5307 mark this as an element of an aggregate. */
5308 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5309 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5310 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5311 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5312 || (TREE_CODE (exp1) == ADDR_EXPR
5313 && (exp2 = TREE_OPERAND (exp1, 0))
5314 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5315 MEM_IN_STRUCT_P (temp) = 1;
5316 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5317
5318 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5319 here, because, in C and C++, the fact that a location is accessed
5320 through a pointer to const does not mean that the value there can
5321 never change. Languages where it can never change should
5322 also set TREE_STATIC. */
5323 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5324 return temp;
5325 }
5326
5327 case ARRAY_REF:
5328 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5329 abort ();
5330
5331 {
5332 tree array = TREE_OPERAND (exp, 0);
5333 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5334 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5335 tree index = TREE_OPERAND (exp, 1);
5336 tree index_type = TREE_TYPE (index);
5337 int i;
5338
5339 if (TREE_CODE (low_bound) != INTEGER_CST
5340 && contains_placeholder_p (low_bound))
5341 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5342
5343 /* Optimize the special case of a zero lower bound.
5344
5345 We convert the low_bound to sizetype to avoid some problems
5346 with constant folding. (E.g. suppose the lower bound is 1,
5347 and its mode is QI. Without the conversion, (ARRAY
5348 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5349 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5350
5351 But sizetype isn't quite right either (especially if
5352 the low bound is negative). FIXME */
5353
5354 if (! integer_zerop (low_bound))
5355 index = fold (build (MINUS_EXPR, index_type, index,
5356 convert (sizetype, low_bound)));
5357
5358 if ((TREE_CODE (index) != INTEGER_CST
5359 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5360 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5361 {
5362 /* Nonconstant array index or nonconstant element size, and
5363 not an array in an unaligned (packed) structure field.
5364 Generate the tree for *(&array+index) and expand that,
5365 except do it in a language-independent way
5366 and don't complain about non-lvalue arrays.
5367 `mark_addressable' should already have been called
5368 for any array for which this case will be reached. */
5369
5370 /* Don't forget the const or volatile flag from the array
5371 element. */
5372 tree variant_type = build_type_variant (type,
5373 TREE_READONLY (exp),
5374 TREE_THIS_VOLATILE (exp));
5375 tree array_adr = build1 (ADDR_EXPR,
5376 build_pointer_type (variant_type), array);
5377 tree elt;
5378 tree size = size_in_bytes (type);
5379
5380 /* Convert the integer argument to a type the same size as sizetype
5381 so the multiply won't overflow spuriously. */
5382 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5383 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5384 index);
5385
5386 if (TREE_CODE (size) != INTEGER_CST
5387 && contains_placeholder_p (size))
5388 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5389
5390 /* Don't think the address has side effects
5391 just because the array does.
5392 (In some cases the address might have side effects,
5393 and we fail to record that fact here. However, it should not
5394 matter, since expand_expr should not care.) */
5395 TREE_SIDE_EFFECTS (array_adr) = 0;
5396
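/* Build ELT = *(ARRAY_ADR + (INDEX * SIZE)), converting the byte
 offset to the pointer type so the PLUS_EXPR folds like address
 arithmetic. */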
5397 elt
5398 = build1
5399 (INDIRECT_REF, type,
5400 fold (build (PLUS_EXPR,
5401 TYPE_POINTER_TO (variant_type),
5402 array_adr,
5403 fold
5404 (build1
5405 (NOP_EXPR,
5406 TYPE_POINTER_TO (variant_type),
5407 fold (build (MULT_EXPR, TREE_TYPE (index),
5408 index,
5409 convert (TREE_TYPE (index),
5410 size))))))));
5411
5412 /* Volatility, etc., of new expression is same as old
5413 expression. */
5414 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5415 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5416 TREE_READONLY (elt) = TREE_READONLY (exp);
5417
5418 return expand_expr (elt, target, tmode, modifier);
5419 }
5420
5421 /* Fold an expression like: "foo"[2].
5422 This is not done in fold so it won't happen inside &.
5423 Don't fold if this is for wide characters since it's too
5424 difficult to do correctly and this is a very rare case. */
5425
5426 if (TREE_CODE (array) == STRING_CST
5427 && TREE_CODE (index) == INTEGER_CST
5428 && !TREE_INT_CST_HIGH (index)
5429 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5430 && GET_MODE_CLASS (mode) == MODE_INT
5431 && GET_MODE_SIZE (mode) == 1)
5432 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5433
5434 /* If this is a constant index into a constant array,
5435 just get the value from the array. Handle both the case of an
5436 explicit constructor and the case of a variable that was
5437 declared const. */
5438
5439 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5440 {
5441 if (TREE_CODE (index) == INTEGER_CST
5442 && TREE_INT_CST_HIGH (index) == 0)
5443 {
5444 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5445
5446 i = TREE_INT_CST_LOW (index);
5447 while (elem && i--)
5448 elem = TREE_CHAIN (elem);
5449 if (elem)
5450 return expand_expr (fold (TREE_VALUE (elem)), target,
5451 tmode, modifier);
5452 }
5453 }
5454
5455 else if (optimize >= 1
5456 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5457 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5458 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5459 {
5460 if (TREE_CODE (index) == INTEGER_CST
5461 && TREE_INT_CST_HIGH (index) == 0)
5462 {
5463 tree init = DECL_INITIAL (array);
5464
5465 i = TREE_INT_CST_LOW (index);
5466 if (TREE_CODE (init) == CONSTRUCTOR)
5467 {
5468 tree elem = CONSTRUCTOR_ELTS (init);
5469
5470 while (elem
5471 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5472 elem = TREE_CHAIN (elem);
5473 if (elem)
5474 return expand_expr (fold (TREE_VALUE (elem)), target,
5475 tmode, modifier);
5476 }
5477 else if (TREE_CODE (init) == STRING_CST
5478 && i < TREE_STRING_LENGTH (init))
5479 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5480 }
5481 }
5482 }
5483
5484 /* Treat array-ref with constant index as a component-ref. */
5485
5486 case COMPONENT_REF:
5487 case BIT_FIELD_REF:
5488 /* If the operand is a CONSTRUCTOR, we can just extract the
5489 appropriate field if it is present. Don't do this if we have
5490 already written the data since we want to refer to that copy
5491 and varasm.c assumes that's what we'll do. */
5492 if (code != ARRAY_REF
5493 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5494 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5495 {
5496 tree elt;
5497
5498 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5499 elt = TREE_CHAIN (elt))
5500 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5501 /* We can normally use the value of the field in the
5502 CONSTRUCTOR. However, if this is a bitfield in
5503 an integral mode that we can fit in a HOST_WIDE_INT,
5504 we must mask only the number of bits in the bitfield,
5505 since this is done implicitly by the constructor. If
5506 the bitfield does not meet either of those conditions,
5507 we can't do this optimization. */
5508 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5509 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5510 == MODE_INT)
5511 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5512 <= HOST_BITS_PER_WIDE_INT))))
5513 {
5514 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5515 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5516 {
5517 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5518 enum machine_mode imode
5519 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5520
5521 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5522 {
5523 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5524 op0 = expand_and (op0, op1, target);
5525 }
5526 else
5527 {
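/* Sign-extend the BITSIZE-bit value: shift it to the top of IMODE,
 then arithmetic-shift it back down. */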
5528 tree count
5529 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5530
5531 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5532 target, 0);
5533 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5534 target, 0);
5535 }
5536 }
5537
5538 return op0;
5539 }
5540 }
5541
5542 {
5543 enum machine_mode mode1;
5544 int bitsize;
5545 int bitpos;
5546 tree offset;
5547 int volatilep = 0;
5548 int alignment;
5549 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5550 &mode1, &unsignedp, &volatilep,
5551 &alignment);
5552
5553 /* If we got back the original object, something is wrong. Perhaps
5554 we are evaluating an expression too early. In any event, don't
5555 infinitely recurse. */
5556 if (tem == exp)
5557 abort ();
5558
5559 /* If TEM's type is a union of variable size, pass TARGET to the inner
5560 computation, since it will need a temporary and TARGET is known
5561 to suffice. This occurs in unchecked conversion in Ada. */
5562
5563 op0 = expand_expr (tem,
5564 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5565 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5566 != INTEGER_CST)
5567 ? target : NULL_RTX),
5568 VOIDmode,
5569 modifier == EXPAND_INITIALIZER ? modifier : 0);
5570
5571 /* If this is a constant, put it into a register if it is a
5572 legitimate constant and memory if it isn't. */
5573 if (CONSTANT_P (op0))
5574 {
5575 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5576 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5577 op0 = force_reg (mode, op0);
5578 else
5579 op0 = validize_mem (force_const_mem (mode, op0));
5580 }
5581
5582 if (offset != 0)
5583 {
5584 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5585
5586 if (GET_CODE (op0) != MEM)
5587 abort ();
5588 op0 = change_address (op0, VOIDmode,
5589 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5590 force_reg (ptr_mode, offset_rtx)));
5591 }
5592
5593 /* Don't forget about volatility even if this is a bitfield. */
5594 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5595 {
5596 op0 = copy_rtx (op0);
5597 MEM_VOLATILE_P (op0) = 1;
5598 }
5599
5600 /* In cases where an aligned union has an unaligned object
5601 as a field, we might be extracting a BLKmode value from
5602 an integer-mode (e.g., SImode) object. Handle this case
5603 by doing the extract into an object as wide as the field
5604 (which we know to be the width of a basic mode), then
5605 storing into memory, and changing the mode to BLKmode.
5606 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5607 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5608 if (mode1 == VOIDmode
5609 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5610 || (modifier != EXPAND_CONST_ADDRESS
5611 && modifier != EXPAND_INITIALIZER
5612 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5613 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5614 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5615 /* If the field isn't aligned enough to fetch as a memref,
5616 fetch it as a bit field. */
5617 || (SLOW_UNALIGNED_ACCESS
5618 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5619 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5620 {
5621 enum machine_mode ext_mode = mode;
5622
5623 if (ext_mode == BLKmode)
5624 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5625
5626 if (ext_mode == BLKmode)
5627 {
5628 /* In this case, BITPOS must start at a byte boundary and
5629 TARGET, if specified, must be a MEM. */
5630 if (GET_CODE (op0) != MEM
5631 || (target != 0 && GET_CODE (target) != MEM)
5632 || bitpos % BITS_PER_UNIT != 0)
5633 abort ();
5634
5635 op0 = change_address (op0, VOIDmode,
5636 plus_constant (XEXP (op0, 0),
5637 bitpos / BITS_PER_UNIT));
5638 if (target == 0)
5639 target = assign_temp (type, 0, 1, 1);
5640
5641 emit_block_move (target, op0,
5642 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5643 / BITS_PER_UNIT),
5644 1);
5645
5646 return target;
5647 }
5648
5649 op0 = validize_mem (op0);
5650
5651 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5652 mark_reg_pointer (XEXP (op0, 0), alignment);
5653
5654 op0 = extract_bit_field (op0, bitsize, bitpos,
5655 unsignedp, target, ext_mode, ext_mode,
5656 alignment,
5657 int_size_in_bytes (TREE_TYPE (tem)));
5658
5659 /* If the result is a record type and BITSIZE is narrower than
5660 the mode of OP0, an integral mode, and this is a big endian
5661 machine, we must put the field into the high-order bits. */
5662 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5663 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5664 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5665 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5666 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5667 - bitsize),
5668 op0, 1);
5669
5670 if (mode == BLKmode)
5671 {
5672 rtx new = assign_stack_temp (ext_mode,
5673 bitsize / BITS_PER_UNIT, 0);
5674
5675 emit_move_insn (new, op0);
5676 op0 = copy_rtx (new);
5677 PUT_MODE (op0, BLKmode);
5678 MEM_IN_STRUCT_P (op0) = 1;
5679 }
5680
5681 return op0;
5682 }
5683
5684 /* If the result is BLKmode, use that to access the object
5685 now as well. */
5686 if (mode == BLKmode)
5687 mode1 = BLKmode;
5688
5689 /* Get a reference to just this component. */
5690 if (modifier == EXPAND_CONST_ADDRESS
5691 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5692 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5693 (bitpos / BITS_PER_UNIT)));
5694 else
5695 op0 = change_address (op0, mode1,
5696 plus_constant (XEXP (op0, 0),
5697 (bitpos / BITS_PER_UNIT)));
5698 if (GET_CODE (XEXP (op0, 0)) == REG)
5699 mark_reg_pointer (XEXP (op0, 0), alignment);
5700
5701 MEM_IN_STRUCT_P (op0) = 1;
5702 MEM_VOLATILE_P (op0) |= volatilep;
5703 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5704 || modifier == EXPAND_CONST_ADDRESS
5705 || modifier == EXPAND_INITIALIZER)
5706 return op0;
5707 else if (target == 0)
5708 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5709
5710 convert_move (target, op0, unsignedp);
5711 return target;
5712 }
5713
5714 /* Intended for a reference to a buffer of a file-object in Pascal.
5715 But it's not certain that a special tree code will really be
5716 necessary for these. INDIRECT_REF might work for them. */
5717 case BUFFER_REF:
5718 abort ();
5719
5720 case IN_EXPR:
5721 {
5722 /* Pascal set IN expression.
5723
5724 Algorithm:
5725 rlo = set_low - (set_low%bits_per_word);
5726 the_word = set [ (index - rlo)/bits_per_word ];
5727 bit_index = index % bits_per_word;
5728 bitmask = 1 << bit_index;
5729 return !!(the_word & bitmask); */
5730
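/* The code below works a byte at a time, so BITS_PER_UNIT plays
 the role of bits_per_word in the sketch above. */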
5731 tree set = TREE_OPERAND (exp, 0);
5732 tree index = TREE_OPERAND (exp, 1);
5733 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5734 tree set_type = TREE_TYPE (set);
5735 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5736 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5737 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5738 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5739 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5740 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5741 rtx setaddr = XEXP (setval, 0);
5742 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5743 rtx rlow;
5744 rtx diff, quo, rem, addr, bit, result;
5745
5746 preexpand_calls (exp);
5747
5748 /* If domain is empty, answer is no. Likewise if index is constant
5749 and out of bounds. */
5750 if (((TREE_CODE (set_high_bound) == INTEGER_CST
5751 && TREE_CODE (set_low_bound) == INTEGER_CST
5752 && tree_int_cst_lt (set_high_bound, set_low_bound))
5753 || (TREE_CODE (index) == INTEGER_CST
5754 && TREE_CODE (set_low_bound) == INTEGER_CST
5755 && tree_int_cst_lt (index, set_low_bound))
5756 || (TREE_CODE (set_high_bound) == INTEGER_CST
5757 && TREE_CODE (index) == INTEGER_CST
5758 && tree_int_cst_lt (set_high_bound, index))))
5759 return const0_rtx;
5760
5761 if (target == 0)
5762 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5763
5764 /* If we get here, we have to generate the code for both cases
5765 (in range and out of range). */
5766
5767 op0 = gen_label_rtx ();
5768 op1 = gen_label_rtx ();
5769
5770 if (! (GET_CODE (index_val) == CONST_INT
5771 && GET_CODE (lo_r) == CONST_INT))
5772 {
5773 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5774 GET_MODE (index_val), iunsignedp, 0);
5775 emit_jump_insn (gen_blt (op1));
5776 }
5777
5778 if (! (GET_CODE (index_val) == CONST_INT
5779 && GET_CODE (hi_r) == CONST_INT))
5780 {
5781 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5782 GET_MODE (index_val), iunsignedp, 0);
5783 emit_jump_insn (gen_bgt (op1));
5784 }
5785
5786 /* Calculate the element number of bit zero in the first word
5787 of the set. */
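/* That is, round SET_LOW down to a multiple of BITS_PER_UNIT. */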
5788 if (GET_CODE (lo_r) == CONST_INT)
5789 rlow = GEN_INT (INTVAL (lo_r)
5790 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
5791 else
5792 rlow = expand_binop (index_mode, and_optab, lo_r,
5793 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
5794 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5795
5796 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5797 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5798
5799 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5800 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5801 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5802 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5803
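/* The byte holding the bit lives at SETADDR plus the byte
 quotient computed above. */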
5804 addr = memory_address (byte_mode,
5805 expand_binop (index_mode, add_optab, quo,
5806 setaddr, NULL_RTX, iunsignedp,
5807 OPTAB_LIB_WIDEN));
5808
5809 /* Extract the bit we want to examine. */
5810 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5811 gen_rtx (MEM, byte_mode, addr),
5812 make_tree (TREE_TYPE (index), rem),
5813 NULL_RTX, 1);
5814 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5815 GET_MODE (target) == byte_mode ? target : 0,
5816 1, OPTAB_LIB_WIDEN);
5817
5818 if (result != target)
5819 convert_move (target, result, 1);
5820
5821 /* Output the code to handle the out-of-range case. */
5822 emit_jump (op0);
5823 emit_label (op1);
5824 emit_move_insn (target, const0_rtx);
5825 emit_label (op0);
5826 return target;
5827 }
5828
5829 case WITH_CLEANUP_EXPR:
5830 if (RTL_EXPR_RTL (exp) == 0)
5831 {
5832 RTL_EXPR_RTL (exp)
5833 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5834 cleanups_this_call
5835 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5836 /* That's it for this cleanup. */
5837 TREE_OPERAND (exp, 2) = 0;
5838 expand_eh_region_start ();
5839 }
5840 return RTL_EXPR_RTL (exp);
5841
5842 case CLEANUP_POINT_EXPR:
5843 {
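/* Expand the operand in its own temporary-slot level, run any
 cleanups it queued, then free its temporaries while preserving
 OP0 itself for the caller. */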
5844 extern int temp_slot_level;
5845 tree old_cleanups = cleanups_this_call;
5846 int old_temp_level = target_temp_slot_level;
5847 push_temp_slots ();
5848 target_temp_slot_level = temp_slot_level;
5849 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5850 /* If we're going to use this value, load it up now. */
5851 if (! ignore)
5852 op0 = force_not_mem (op0);
5853 expand_cleanups_to (old_cleanups);
5854 preserve_temp_slots (op0);
5855 free_temp_slots ();
5856 pop_temp_slots ();
5857 target_temp_slot_level = old_temp_level;
5858 }
5859 return op0;
5860
5861 case CALL_EXPR:
5862 /* Check for a built-in function. */
5863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5864 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5865 == FUNCTION_DECL)
5866 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5867 return expand_builtin (exp, target, subtarget, tmode, ignore);
5868
5869 /* If this call was expanded already by preexpand_calls,
5870 just return the result we got. */
5871 if (CALL_EXPR_RTL (exp) != 0)
5872 return CALL_EXPR_RTL (exp);
5873
5874 return expand_call (exp, target, ignore);
5875
5876 case NON_LVALUE_EXPR:
5877 case NOP_EXPR:
5878 case CONVERT_EXPR:
5879 case REFERENCE_EXPR:
5880 if (TREE_CODE (type) == UNION_TYPE)
5881 {
5882 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5883 if (target == 0)
5884 {
5885 if (mode != BLKmode)
5886 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5887 else
5888 target = assign_temp (type, 0, 1, 1);
5889 }
5890
5891 if (GET_CODE (target) == MEM)
5892 /* Store data into beginning of memory target. */
5893 store_expr (TREE_OPERAND (exp, 0),
5894 change_address (target, TYPE_MODE (valtype), 0), 0);
5895
5896 else if (GET_CODE (target) == REG)
5897 /* Store this field into a union of the proper type. */
5898 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5899 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5900 VOIDmode, 0, 1,
5901 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5902 else
5903 abort ();
5904
5905 /* Return the entire union. */
5906 return target;
5907 }
5908
5909 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5910 {
5911 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5912 modifier);
5913
5914 /* If the signedness of the conversion differs and OP0 is
5915 a promoted SUBREG, clear that indication since we now
5916 have to do the proper extension. */
5917 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5918 && GET_CODE (op0) == SUBREG)
5919 SUBREG_PROMOTED_VAR_P (op0) = 0;
5920
5921 return op0;
5922 }
5923
5924 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5925 if (GET_MODE (op0) == mode)
5926 return op0;
5927
5928 /* If OP0 is a constant, just convert it into the proper mode. */
5929 if (CONSTANT_P (op0))
5930 return
5931 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5932 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5933
5934 if (modifier == EXPAND_INITIALIZER)
5935 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5936
5937 if (target == 0)
5938 return
5939 convert_to_mode (mode, op0,
5940 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5941 else
5942 convert_move (target, op0,
5943 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5944 return target;
5945
5946 case PLUS_EXPR:
5947 /* We come here from MINUS_EXPR when the second operand is a
5948 constant. */
5949 plus_expr:
5950 this_optab = add_optab;
5951
5952 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5953 something else, make sure we add the register to the constant and
5954 then to the other thing. This case can occur during strength
5955 reduction and doing it this way will produce better code if the
5956 frame pointer or argument pointer is eliminated.
5957
5958 fold-const.c will ensure that the constant is always in the inner
5959 PLUS_EXPR, so the only case we need to do anything about is if
5960 sp, ap, or fp is our second argument, in which case we must swap
5961 the innermost first argument and our second argument. */
5962
5963 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5964 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5965 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5966 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5967 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5968 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5969 {
5970 tree t = TREE_OPERAND (exp, 1);
5971
5972 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5973 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5974 }
5975
5976 /* If the result is to be ptr_mode and we are adding an integer to
5977 something, we might be forming a constant. So try to use
5978 plus_constant. If it produces a sum and we can't accept it,
5979 use force_operand. This allows P = &ARR[const] to generate
5980 efficient code on machines where a SYMBOL_REF is not a valid
5981 address.
5982
5983 If this is an EXPAND_SUM call, always return the sum. */
5984 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5985 || mode == ptr_mode)
5986 {
5987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5988 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5989 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5990 {
5991 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5992 EXPAND_SUM);
5993 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5994 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5995 op1 = force_operand (op1, target);
5996 return op1;
5997 }
5998
5999 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6000 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6001 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6002 {
6003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6004 EXPAND_SUM);
6005 if (! CONSTANT_P (op0))
6006 {
6007 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6008 VOIDmode, modifier);
6009 /* Don't go to both_summands if modifier
6010 says it's not right to return a PLUS. */
6011 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6012 goto binop2;
6013 goto both_summands;
6014 }
6015 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6016 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6017 op0 = force_operand (op0, target);
6018 return op0;
6019 }
6020 }
6021
6022 /* No sense saving up arithmetic to be done
6023 if it's all in the wrong mode to form part of an address.
6024 And force_operand won't know whether to sign-extend or
6025 zero-extend. */
6026 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6027 || mode != ptr_mode)
6028 goto binop;
6029
6030 preexpand_calls (exp);
6031 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6032 subtarget = 0;
6033
6034 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
6035 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
6036
6037 both_summands:
6038 /* Make sure any term that's a sum with a constant comes last. */
6039 if (GET_CODE (op0) == PLUS
6040 && CONSTANT_P (XEXP (op0, 1)))
6041 {
6042 temp = op0;
6043 op0 = op1;
6044 op1 = temp;
6045 }
6046 /* If adding to a sum including a constant,
6047 associate it to put the constant outside. */
6048 if (GET_CODE (op1) == PLUS
6049 && CONSTANT_P (XEXP (op1, 1)))
6050 {
6051 rtx constant_term = const0_rtx;
6052
6053 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6054 if (temp != 0)
6055 op0 = temp;
6056 /* Ensure that MULT comes first if there is one. */
6057 else if (GET_CODE (op0) == MULT)
6058 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6059 else
6060 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6061
6062 /* Let's also eliminate constants from op0 if possible. */
6063 op0 = eliminate_constant_term (op0, &constant_term);
6064
6065 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6066 their sum should be a constant. Form it into OP1, since the
6067 result we want will then be OP0 + OP1. */
6068
6069 temp = simplify_binary_operation (PLUS, mode, constant_term,
6070 XEXP (op1, 1));
6071 if (temp != 0)
6072 op1 = temp;
6073 else
6074 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6075 }
6076
6077 /* Put a constant term last and put a multiplication first. */
6078 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6079 temp = op1, op1 = op0, op0 = temp;
6080
6081 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6082 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6083
6084 case MINUS_EXPR:
6085 /* For initializers, we are allowed to return a MINUS of two
6086 symbolic constants. Here we handle all cases when both operands
6087 are constant. */
6088 /* Handle difference of two symbolic constants,
6089 for the sake of an initializer. */
6090 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6091 && really_constant_p (TREE_OPERAND (exp, 0))
6092 && really_constant_p (TREE_OPERAND (exp, 1)))
6093 {
6094 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6095 VOIDmode, modifier);
6096 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6097 VOIDmode, modifier);
6098
6099 /* If the last operand is a CONST_INT, use plus_constant of
6100 the negated constant. Else make the MINUS. */
6101 if (GET_CODE (op1) == CONST_INT)
6102 return plus_constant (op0, - INTVAL (op1));
6103 else
6104 return gen_rtx (MINUS, mode, op0, op1);
6105 }
6106 /* Convert A - const to A + (-const). */
6107 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6108 {
6109 tree negated = fold (build1 (NEGATE_EXPR, type,
6110 TREE_OPERAND (exp, 1)));
6111
6112 /* Deal with the case where we can't negate the constant
6113 in TYPE. */
6114 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6115 {
6116 tree newtype = signed_type (type);
6117 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6118 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6119 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6120
6121 if (! TREE_OVERFLOW (newneg))
6122 return expand_expr (convert (type,
6123 build (PLUS_EXPR, newtype,
6124 newop0, newneg)),
6125 target, tmode, modifier);
6126 }
6127 else
6128 {
6129 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6130 goto plus_expr;
6131 }
6132 }
6133 this_optab = sub_optab;
6134 goto binop;
6135
6136 case MULT_EXPR:
6137 preexpand_calls (exp);
6138 /* If first operand is constant, swap them.
6139 Thus the following special case checks need only
6140 check the second operand. */
6141 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6142 {
6143 register tree t1 = TREE_OPERAND (exp, 0);
6144 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6145 TREE_OPERAND (exp, 1) = t1;
6146 }
6147
6148 /* Attempt to return something suitable for generating an
6149 indexed address, for machines that support that. */
6150
6151 if (modifier == EXPAND_SUM && mode == ptr_mode
6152 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6153 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6154 {
6155 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6156
6157 /* Apply distributive law if OP0 is x+c. */
6158 if (GET_CODE (op0) == PLUS
6159 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6160 return gen_rtx (PLUS, mode,
6161 gen_rtx (MULT, mode, XEXP (op0, 0),
6162 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6163 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6164 * INTVAL (XEXP (op0, 1))));
6165
6166 if (GET_CODE (op0) != REG)
6167 op0 = force_operand (op0, NULL_RTX);
6168 if (GET_CODE (op0) != REG)
6169 op0 = copy_to_mode_reg (mode, op0);
6170
6171 return gen_rtx (MULT, mode, op0,
6172 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6173 }
6174
6175 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6176 subtarget = 0;
6177
6178 /* Check for multiplying things that have been extended
6179 from a narrower type. If this machine supports multiplying
6180 in that narrower type with a result in the desired type,
6181 do it that way, and avoid the explicit type-conversion. */
6182 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6183 && TREE_CODE (type) == INTEGER_TYPE
6184 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6185 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6186 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6187 && int_fits_type_p (TREE_OPERAND (exp, 1),
6188 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6189 /* Don't use a widening multiply if a shift will do. */
6190 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6191 > HOST_BITS_PER_WIDE_INT)
6192 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6193 ||
6194 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6195 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6196 ==
6197 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6198 /* If both operands are extended, they must either both
6199 be zero-extended or both be sign-extended. */
6200 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6201 ==
6202 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6203 {
6204 enum machine_mode innermode
6205 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6206 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6207 ? smul_widen_optab : umul_widen_optab);
6208 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6209 ? umul_widen_optab : smul_widen_optab);
6210 if (mode == GET_MODE_WIDER_MODE (innermode))
6211 {
6212 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6213 {
6214 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6215 NULL_RTX, VOIDmode, 0);
6216 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6218 VOIDmode, 0);
6219 else
6220 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6221 NULL_RTX, VOIDmode, 0);
6222 goto binop2;
6223 }
6224 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6225 && innermode == word_mode)
6226 {
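/* Only the opposite-signedness widening multiply is available.
 Compute the product with it, then let
 expand_mult_highpart_adjust correct the high half. */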
6227 rtx htem;
6228 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6229 NULL_RTX, VOIDmode, 0);
6230 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6231 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6232 VOIDmode, 0);
6233 else
6234 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6235 NULL_RTX, VOIDmode, 0);
6236 temp = expand_binop (mode, other_optab, op0, op1, target,
6237 unsignedp, OPTAB_LIB_WIDEN);
6238 htem = expand_mult_highpart_adjust (innermode,
6239 gen_highpart (innermode, temp),
6240 op0, op1,
6241 gen_highpart (innermode, temp),
6242 unsignedp);
6243 emit_move_insn (gen_highpart (innermode, temp), htem);
6244 return temp;
6245 }
6246 }
6247 }
6248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6250 return expand_mult (mode, op0, op1, target, unsignedp);
6251
6252 case TRUNC_DIV_EXPR:
6253 case FLOOR_DIV_EXPR:
6254 case CEIL_DIV_EXPR:
6255 case ROUND_DIV_EXPR:
6256 case EXACT_DIV_EXPR:
6257 preexpand_calls (exp);
6258 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6259 subtarget = 0;
6260 /* Possible optimization: compute the dividend with EXPAND_SUM;
6261 then, if the divisor is constant, we can optimize the case
6262 where some terms of the dividend have coeffs divisible by it. */
6263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6265 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6266
6267 case RDIV_EXPR:
6268 this_optab = flodiv_optab;
6269 goto binop;
6270
6271 case TRUNC_MOD_EXPR:
6272 case FLOOR_MOD_EXPR:
6273 case CEIL_MOD_EXPR:
6274 case ROUND_MOD_EXPR:
6275 preexpand_calls (exp);
6276 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6277 subtarget = 0;
6278 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6279 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6280 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6281
6282 case FIX_ROUND_EXPR:
6283 case FIX_FLOOR_EXPR:
6284 case FIX_CEIL_EXPR:
6285 abort (); /* Not used for C. */
6286
6287 case FIX_TRUNC_EXPR:
6288 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6289 if (target == 0)
6290 target = gen_reg_rtx (mode);
6291 expand_fix (target, op0, unsignedp);
6292 return target;
6293
6294 case FLOAT_EXPR:
6295 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6296 if (target == 0)
6297 target = gen_reg_rtx (mode);
6298 /* expand_float can't figure out what to do if FROM has VOIDmode.
6299 So give it the correct mode. With -O, cse will optimize this. */
6300 if (GET_MODE (op0) == VOIDmode)
6301 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6302 op0);
6303 expand_float (target, op0,
6304 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6305 return target;
6306
6307 case NEGATE_EXPR:
6308 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6309 temp = expand_unop (mode, neg_optab, op0, target, 0);
6310 if (temp == 0)
6311 abort ();
6312 return temp;
6313
6314 case ABS_EXPR:
6315 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6316
6317 /* Handle complex values specially. */
6318 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6319 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6320 return expand_complex_abs (mode, op0, target, unsignedp);
6321
6322 /* Unsigned abs is simply the operand. Testing here means we don't
6323 risk generating incorrect code below. */
6324 if (TREE_UNSIGNED (type))
6325 return op0;
6326
6327 return expand_abs (mode, op0, target, unsignedp,
6328 safe_from_p (target, TREE_OPERAND (exp, 0)));
6329
6330 case MAX_EXPR:
6331 case MIN_EXPR:
6332 target = original_target;
6333 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6334 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6335 || GET_MODE (target) != mode
6336 || (GET_CODE (target) == REG
6337 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6338 target = gen_reg_rtx (mode);
6339 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6340 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6341
6342 /* First try to do it with a special MIN or MAX instruction.
6343 If that does not win, use a conditional jump to select the proper
6344 value. */
6345 this_optab = (TREE_UNSIGNED (type)
6346 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6347 : (code == MIN_EXPR ? smin_optab : smax_optab));
6348
6349 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6350 OPTAB_WIDEN);
6351 if (temp != 0)
6352 return temp;
6353
6354 /* At this point, a MEM target is no longer useful; we will get better
6355 code without it. */
6356
6357 if (GET_CODE (target) == MEM)
6358 target = gen_reg_rtx (mode);
6359
6360 if (target != op0)
6361 emit_move_insn (target, op0);
6362
6363 op0 = gen_label_rtx ();
6364
6365 /* If this mode is an integer too wide to compare properly,
6366 compare word by word. Rely on cse to optimize constant cases. */
6367 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6368 {
6369 if (code == MAX_EXPR)
6370 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6371 target, op1, NULL_RTX, op0);
6372 else
6373 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6374 op1, target, NULL_RTX, op0);
6375 emit_move_insn (target, op1);
6376 }
6377 else
6378 {
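/* compare_from_rtx may fold the comparison to a constant:
 const0_rtx means OP1 always wins, const_true_rtx means TARGET
 already holds the answer. */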
6379 if (code == MAX_EXPR)
6380 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6381 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6382 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6383 else
6384 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6385 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6386 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6387 if (temp == const0_rtx)
6388 emit_move_insn (target, op1);
6389 else if (temp != const_true_rtx)
6390 {
6391 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6392 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6393 else
6394 abort ();
6395 emit_move_insn (target, op1);
6396 }
6397 }
6398 emit_label (op0);
6399 return target;
6400
6401 case BIT_NOT_EXPR:
6402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6403 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6404 if (temp == 0)
6405 abort ();
6406 return temp;
6407
6408 case FFS_EXPR:
6409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6410 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6411 if (temp == 0)
6412 abort ();
6413 return temp;
6414
6415 /* ??? Can optimize bitwise operations with one arg constant.
6416 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6417 and (a bitwise1 b) bitwise2 b (etc)
6418 but that is probably not worthwhile. */
6419
6420 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6421 boolean values when we want in all cases to compute both of them. In
6422 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6423 as actual zero-or-1 values and then bitwise anding. In cases where
6424 there cannot be any side effects, better code would be made by
6425 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6426 how to recognize those cases. */
6427
6428 case TRUTH_AND_EXPR:
6429 case BIT_AND_EXPR:
6430 this_optab = and_optab;
6431 goto binop;
6432
6433 case TRUTH_OR_EXPR:
6434 case BIT_IOR_EXPR:
6435 this_optab = ior_optab;
6436 goto binop;
6437
6438 case TRUTH_XOR_EXPR:
6439 case BIT_XOR_EXPR:
6440 this_optab = xor_optab;
6441 goto binop;
6442
6443 case LSHIFT_EXPR:
6444 case RSHIFT_EXPR:
6445 case LROTATE_EXPR:
6446 case RROTATE_EXPR:
6447 preexpand_calls (exp);
6448 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6449 subtarget = 0;
6450 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6451 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6452 unsignedp);
6453
6454 /* Could determine the answer when only additive constants differ. Also,
6455 the addition of one can be handled by changing the condition. */
6456 case LT_EXPR:
6457 case LE_EXPR:
6458 case GT_EXPR:
6459 case GE_EXPR:
6460 case EQ_EXPR:
6461 case NE_EXPR:
6462 preexpand_calls (exp);
6463 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6464 if (temp != 0)
6465 return temp;
6466
6467 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6468 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6469 && original_target
6470 && GET_CODE (original_target) == REG
6471 && (GET_MODE (original_target)
6472 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6473 {
6474 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6475 VOIDmode, 0);
6476
6477 if (temp != original_target)
6478 temp = copy_to_reg (temp);
6479
6480 op1 = gen_label_rtx ();
6481 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6482 GET_MODE (temp), unsignedp, 0);
6483 emit_jump_insn (gen_beq (op1));
6484 emit_move_insn (temp, const1_rtx);
6485 emit_label (op1);
6486 return temp;
6487 }
6488
6489 /* If no set-flag instruction, must generate a conditional
6490 store into a temporary variable. Drop through
6491 and handle this like && and ||. */
6492
6493 case TRUTH_ANDIF_EXPR:
6494 case TRUTH_ORIF_EXPR:
6495 if (! ignore
6496 && (target == 0 || ! safe_from_p (target, exp)
6497 /* Make sure we don't have a hard reg (such as function's return
6498 value) live across basic blocks, if not optimizing. */
6499 || (!optimize && GET_CODE (target) == REG
6500 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6501 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6502
6503 if (target)
6504 emit_clr_insn (target);
6505
6506 op1 = gen_label_rtx ();
6507 jumpifnot (exp, op1);
6508
6509 if (target)
6510 emit_0_to_1_insn (target);
6511
6512 emit_label (op1);
6513 return ignore ? const0_rtx : target;
6514
6515 case TRUTH_NOT_EXPR:
6516 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6517 /* The parser is careful to generate TRUTH_NOT_EXPR
6518 only with operands that are always zero or one. */
6519 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6520 target, 1, OPTAB_LIB_WIDEN);
6521 if (temp == 0)
6522 abort ();
6523 return temp;
6524
6525 case COMPOUND_EXPR:
6526 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6527 emit_queue ();
6528 return expand_expr (TREE_OPERAND (exp, 1),
6529 (ignore ? const0_rtx : target),
6530 VOIDmode, 0);
6531
6532 case COND_EXPR:
6533 /* If we would have a "singleton" (see below) were it not for a
6534 conversion in each arm, bring that conversion back out. */
6535 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6536 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6537 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6538 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6539 {
6540 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6541 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6542
6543 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6544 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6545 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6546 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6547 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6548 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6549 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6550 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6551 return expand_expr (build1 (NOP_EXPR, type,
6552 build (COND_EXPR, TREE_TYPE (true),
6553 TREE_OPERAND (exp, 0),
6554 true, false)),
6555 target, tmode, modifier);
6556 }
6557
6558 {
6559 rtx flag = NULL_RTX;
6560 tree left_cleanups = NULL_TREE;
6561 tree right_cleanups = NULL_TREE;
6562
6563 /* Used to save a pointer to the place to put the setting of
6564 the flag that indicates if this side of the conditional was
6565 taken. We backpatch the code if we find out later that we
6566 have any conditional cleanups that need to be performed. */
6567 rtx dest_right_flag = NULL_RTX;
6568 rtx dest_left_flag = NULL_RTX;
6569
6570 /* Note that COND_EXPRs whose type is a structure or union
6571 are required to be constructed to contain assignments of
6572 a temporary variable, so that we can evaluate them here
6573 for side effect only. If type is void, we must do likewise. */
6574
6575 /* If an arm of the branch requires a cleanup,
6576 only that cleanup is performed. */
6577
6578 tree singleton = 0;
6579 tree binary_op = 0, unary_op = 0;
6580 tree old_cleanups = cleanups_this_call;
6581
6582 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6583 convert it to our mode, if necessary. */
6584 if (integer_onep (TREE_OPERAND (exp, 1))
6585 && integer_zerop (TREE_OPERAND (exp, 2))
6586 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6587 {
6588 if (ignore)
6589 {
6590 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6591 modifier);
6592 return const0_rtx;
6593 }
6594
6595 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6596 if (GET_MODE (op0) == mode)
6597 return op0;
6598
6599 if (target == 0)
6600 target = gen_reg_rtx (mode);
6601 convert_move (target, op0, unsignedp);
6602 return target;
6603 }
6604
6605 /* Check for X ? A + B : A. If we have this, we can copy A to the
6606 output and conditionally add B. Similarly for unary operations.
6607 Don't do this if X has side-effects because those side effects
6608 might affect A or B and the "?" operation is a sequence point in
6609 ANSI. (operand_equal_p tests for side effects.) */
6610
6611 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6612 && operand_equal_p (TREE_OPERAND (exp, 2),
6613 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6614 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6615 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6616 && operand_equal_p (TREE_OPERAND (exp, 1),
6617 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6618 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6619 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6620 && operand_equal_p (TREE_OPERAND (exp, 2),
6621 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6622 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6623 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6624 && operand_equal_p (TREE_OPERAND (exp, 1),
6625 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6626 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
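/* At this point SINGLETON, if nonzero, is the arm equal to the
 shared operand, and BINARY_OP or UNARY_OP is the arm that
 computes with it. */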
6627
6628 /* If we are not to produce a result, we have no target. Otherwise,
6629 if a target was specified use it; it will not be used as an
6630 intermediate target unless it is safe. If no target, use a
6631 temporary. */
6632
6633 if (ignore)
6634 temp = 0;
6635 else if (original_target
6636 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6637 || (singleton && GET_CODE (original_target) == REG
6638 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6639 && original_target == var_rtx (singleton)))
6640 && GET_MODE (original_target) == mode
6641 && ! (GET_CODE (original_target) == MEM
6642 && MEM_VOLATILE_P (original_target)))
6643 temp = original_target;
6644 else if (TREE_ADDRESSABLE (type))
6645 abort ();
6646 else
6647 temp = assign_temp (type, 0, 0, 1);
6648
6649 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6650 do the test of X as a store-flag operation, do this as
6651 A + ((X != 0) << log C). Similarly for other simple binary
6652 operators. Only do for C == 1 if BRANCH_COST is low. */
6653 if (temp && singleton && binary_op
6654 && (TREE_CODE (binary_op) == PLUS_EXPR
6655 || TREE_CODE (binary_op) == MINUS_EXPR
6656 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6657 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6658 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6659 : integer_onep (TREE_OPERAND (binary_op, 1)))
6660 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6661 {
6662 rtx result;
6663 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6664 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6665 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6666 : xor_optab);
6667
6668 /* If we had X ? A : A + 1, do this as A + (X == 0).
6669
6670 We have to invert the truth value here and then put it
6671 back later if do_store_flag fails. We cannot simply copy
6672 TREE_OPERAND (exp, 0) to another variable and modify that
6673 because invert_truthvalue can modify the tree pointed to
6674 by its argument. */
6675 if (singleton == TREE_OPERAND (exp, 1))
6676 TREE_OPERAND (exp, 0)
6677 = invert_truthvalue (TREE_OPERAND (exp, 0));
6678
6679 result = do_store_flag (TREE_OPERAND (exp, 0),
6680 (safe_from_p (temp, singleton)
6681 ? temp : NULL_RTX),
6682 mode, BRANCH_COST <= 1);
6683
6684 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6685 result = expand_shift (LSHIFT_EXPR, mode, result,
6686 build_int_2 (tree_log2
6687 (TREE_OPERAND
6688 (binary_op, 1)),
6689 0),
6690 (safe_from_p (temp, singleton)
6691 ? temp : NULL_RTX), 0);
6692
6693 if (result)
6694 {
6695 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6696 return expand_binop (mode, boptab, op1, result, temp,
6697 unsignedp, OPTAB_LIB_WIDEN);
6698 }
6699 else if (singleton == TREE_OPERAND (exp, 1))
6700 TREE_OPERAND (exp, 0)
6701 = invert_truthvalue (TREE_OPERAND (exp, 0));
6702 }
6703
6704 do_pending_stack_adjust ();
6705 NO_DEFER_POP;
6706 op0 = gen_label_rtx ();
6707
6708 flag = gen_reg_rtx (word_mode);
6709 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6710 {
6711 if (temp != 0)
6712 {
6713 /* If the target conflicts with the other operand of the
6714 binary op, we can't use it. Also, we can't use the target
6715 if it is a hard register, because evaluating the condition
6716 might clobber it. */
6717 if ((binary_op
6718 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6719 || (GET_CODE (temp) == REG
6720 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6721 temp = gen_reg_rtx (mode);
6722 store_expr (singleton, temp, 0);
6723 }
6724 else
6725 expand_expr (singleton,
6726 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6727 dest_left_flag = get_last_insn ();
6728 if (singleton == TREE_OPERAND (exp, 1))
6729 jumpif (TREE_OPERAND (exp, 0), op0);
6730 else
6731 jumpifnot (TREE_OPERAND (exp, 0), op0);
6732
6733 /* Allows cleanups up to here. */
6734 old_cleanups = cleanups_this_call;
6735 if (binary_op && temp == 0)
6736 /* Just touch the other operand. */
6737 expand_expr (TREE_OPERAND (binary_op, 1),
6738 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6739 else if (binary_op)
6740 store_expr (build (TREE_CODE (binary_op), type,
6741 make_tree (type, temp),
6742 TREE_OPERAND (binary_op, 1)),
6743 temp, 0);
6744 else
6745 store_expr (build1 (TREE_CODE (unary_op), type,
6746 make_tree (type, temp)),
6747 temp, 0);
6748 op1 = op0;
6749 dest_right_flag = get_last_insn ();
6750 }
6751 #if 0
6752 /* This is now done in jump.c and is better done there because it
6753 produces shorter register lifetimes. */
6754
6755 /* Check for both possibilities, either constants or variables
6756 in registers (but not the same as the target!). If so, can
6757 save branches by assigning one, branching, and assigning the
6758 other. */
6759 else if (temp && GET_MODE (temp) != BLKmode
6760 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6761 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6762 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6763 && DECL_RTL (TREE_OPERAND (exp, 1))
6764 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6765 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6766 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6767 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6768 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6769 && DECL_RTL (TREE_OPERAND (exp, 2))
6770 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6771 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6772 {
6773 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6774 temp = gen_reg_rtx (mode);
6775 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6776 dest_left_flag = get_last_insn ();
6777 jumpifnot (TREE_OPERAND (exp, 0), op0);
6778
6779 /* Allows cleanups up to here. */
6780 old_cleanups = cleanups_this_call;
6781 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6782 op1 = op0;
6783 dest_right_flag = get_last_insn ();
6784 }
6785 #endif
6786 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6787 comparison operator. If we have one of these cases, set the
6788 output to A, branch on A (cse will merge these two references),
6789 then set the output to FOO. */
6790 else if (temp
6791 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6792 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6793 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6794 TREE_OPERAND (exp, 1), 0)
6795 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6796 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6797 {
6798 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6799 temp = gen_reg_rtx (mode);
6800 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6801 dest_left_flag = get_last_insn ();
6802 jumpif (TREE_OPERAND (exp, 0), op0);
6803
6804 /* Allows cleanups up to here. */
6805 old_cleanups = cleanups_this_call;
6806 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6807 op1 = op0;
6808 dest_right_flag = get_last_insn ();
6809 }
6810 else if (temp
6811 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6812 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6813 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6814 TREE_OPERAND (exp, 2), 0)
6815 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6816 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6817 {
6818 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6819 temp = gen_reg_rtx (mode);
6820 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6821 dest_left_flag = get_last_insn ();
6822 jumpifnot (TREE_OPERAND (exp, 0), op0);
6823
6824 /* Allows cleanups up to here. */
6825 old_cleanups = cleanups_this_call;
6826 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6827 op1 = op0;
6828 dest_right_flag = get_last_insn ();
6829 }
6830 else
6831 {
6832 op1 = gen_label_rtx ();
6833 jumpifnot (TREE_OPERAND (exp, 0), op0);
6834
6835 /* Allows cleanups up to here. */
6836 old_cleanups = cleanups_this_call;
6837 if (temp != 0)
6838 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6839 else
6840 expand_expr (TREE_OPERAND (exp, 1),
6841 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6842 dest_left_flag = get_last_insn ();
6843
6844 /* Handle conditional cleanups, if any. */
6845 left_cleanups = defer_cleanups_to (old_cleanups);
6846
6847 emit_queue ();
6848 emit_jump_insn (gen_jump (op1));
6849 emit_barrier ();
6850 emit_label (op0);
6851 if (temp != 0)
6852 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6853 else
6854 expand_expr (TREE_OPERAND (exp, 2),
6855 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6856 dest_right_flag = get_last_insn ();
6857 }
6858
6859 /* Handle conditional cleanups, if any. */
6860 right_cleanups = defer_cleanups_to (old_cleanups);
6861
6862 emit_queue ();
6863 emit_label (op1);
6864 OK_DEFER_POP;
6865
6866 /* Add back in any conditional cleanups. */
6867 if (left_cleanups || right_cleanups)
6868 {
6869 tree new_cleanups;
6870 tree cond;
6871 rtx last;
6872
6873 /* Now that we know that a flag is needed, go back and add in the
6874 setting of the flag. */
6875
6876 /* Do the left side flag. */
6877 last = get_last_insn ();
6878 /* Flag left cleanups as needed. */
6879 emit_move_insn (flag, const1_rtx);
6880 /* ??? deprecated, use sequences instead. */
6881 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6882
6883 /* Do the right side flag. */
6884 last = get_last_insn ();
6885 /* Flag right cleanups as needed. */
6886 emit_move_insn (flag, const0_rtx);
6887 /* ??? deprecated, use sequences instead. */
6888 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6889
6890 /* All cleanups must be on the function_obstack. */
6891 push_obstacks_nochange ();
6892 resume_temporary_allocation ();
6893
6894 /* Convert flag, which is an rtx, into a tree. */
6895 cond = make_node (RTL_EXPR);
6896 TREE_TYPE (cond) = integer_type_node;
6897 RTL_EXPR_RTL (cond) = flag;
6898 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6899 cond = save_expr (cond);
6900
6901 if (! left_cleanups)
6902 left_cleanups = integer_zero_node;
6903 if (! right_cleanups)
6904 right_cleanups = integer_zero_node;
6905 new_cleanups = build (COND_EXPR, void_type_node,
6906 truthvalue_conversion (cond),
6907 left_cleanups, right_cleanups);
6908 new_cleanups = fold (new_cleanups);
6909
6910 pop_obstacks ();
6911
6912 /* Now add in the conditionalized cleanups. */
6913 cleanups_this_call
6914 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6915 expand_eh_region_start ();
6916 }
6917 return temp;
6918 }
6919
6920 case TARGET_EXPR:
6921 {
6922 /* Something needs to be initialized, but we didn't know
6923 where that thing was when building the tree. For example,
6924 it could be the return value of a function, or a parameter
6925 to a function which must be laid out on the stack, or a temporary
6926 variable which must be passed by reference.
6927
6928 We guarantee that the expression will either be constructed
6929 or copied into our original target. */
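      /* A hypothetical illustration: for a C++ declaration such as

             struct S s = f ();

         the TARGET_EXPR lets `f' construct its result directly in
         the storage for `s', instead of in a temporary that would
         then have to be copied.  */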
6930
6931 tree slot = TREE_OPERAND (exp, 0);
6932 tree cleanups = NULL_TREE;
6933 tree exp1;
6934 rtx temp;
6935
6936 if (TREE_CODE (slot) != VAR_DECL)
6937 abort ();
6938
6939 if (! ignore)
6940 target = original_target;
6941
6942 if (target == 0)
6943 {
6944 if (DECL_RTL (slot) != 0)
6945 {
6946 target = DECL_RTL (slot);
6947 /* If we have already expanded the slot, don't do
6948 it again. (mrs) */
6949 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6950 return target;
6951 }
6952 else
6953 {
6954 target = assign_temp (type, 2, 1, 1);
6955 /* All temp slots at this level must not conflict. */
6956 preserve_temp_slots (target);
6957 DECL_RTL (slot) = target;
6958
6959 /* Since SLOT is not known to the called function
6960 to belong to its stack frame, we must build an explicit
6961 cleanup. This case occurs when we must build up a reference
6962 to pass as an argument. In this case,
6963 it is very likely that such a reference need not be
6964 built here. */
6965
6966 if (TREE_OPERAND (exp, 2) == 0)
6967 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6968 cleanups = TREE_OPERAND (exp, 2);
6969 }
6970 }
6971 else
6972 {
6973 /* This case does occur, when expanding a parameter which
6974 needs to be constructed on the stack. The target
6975 is the actual stack address that we want to initialize.
6976 The function we call will perform the cleanup in this case. */
6977
6978 /* If we have already assigned it space, use that space,
6979 not the target we were passed in, as our target
6980 parameter is only a hint. */
6981 if (DECL_RTL (slot) != 0)
6982 {
6983 target = DECL_RTL (slot);
6984 /* If we have already expanded the slot, don't do
6985 it again. (mrs) */
6986 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6987 return target;
6988 }
6989
6990 DECL_RTL (slot) = target;
6991 }
6992
6993 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6994 /* Mark it as expanded. */
6995 TREE_OPERAND (exp, 1) = NULL_TREE;
6996
6997 store_expr (exp1, target, 0);
6998
6999 if (cleanups)
7000 {
7001 cleanups_this_call = tree_cons (NULL_TREE,
7002 cleanups,
7003 cleanups_this_call);
7004 expand_eh_region_start ();
7005 }
7006
7007 return target;
7008 }
7009
7010 case INIT_EXPR:
7011 {
7012 tree lhs = TREE_OPERAND (exp, 0);
7013 tree rhs = TREE_OPERAND (exp, 1);
7014 tree noncopied_parts = 0;
7015 tree lhs_type = TREE_TYPE (lhs);
7016
7017 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7018 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7019 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7020 TYPE_NONCOPIED_PARTS (lhs_type));
7021 while (noncopied_parts != 0)
7022 {
7023 expand_assignment (TREE_VALUE (noncopied_parts),
7024 TREE_PURPOSE (noncopied_parts), 0, 0);
7025 noncopied_parts = TREE_CHAIN (noncopied_parts);
7026 }
7027 return temp;
7028 }
7029
7030 case MODIFY_EXPR:
7031 {
7032 /* If lhs is complex, expand calls in rhs before computing it.
7033 That's so we don't compute a pointer and save it over a call.
7034 If lhs is simple, compute it first so we can give it as a
7035 target if the rhs is just a call. This avoids an extra temp and copy
7036 and that prevents a partial-subsumption which makes bad code.
7037 Actually we could treat component_ref's of vars like vars. */
7038
7039 tree lhs = TREE_OPERAND (exp, 0);
7040 tree rhs = TREE_OPERAND (exp, 1);
7041 tree noncopied_parts = 0;
7042 tree lhs_type = TREE_TYPE (lhs);
7043
7044 temp = 0;
7045
7046 if (TREE_CODE (lhs) != VAR_DECL
7047 && TREE_CODE (lhs) != RESULT_DECL
7048 && TREE_CODE (lhs) != PARM_DECL)
7049 preexpand_calls (exp);
7050
7051 /* Check for |= or &= of a bitfield of size one into another bitfield
7052 of size one. In this case, (unless we need the result of the
7053 assignment) we can do this more efficiently with a
7054 test followed by an assignment, if necessary.
7055
7056 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7057 things change so we do, this code should be enhanced to
7058 support it. */
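      /* For example (a sketch, not from the source): given

             struct bits { unsigned a : 1, b : 1; } x;

         the statement `x.a |= x.b;', when its result is unused,
         can be compiled as

             if (x.b) x.a = 1;

         replacing a read-modify-write of the destination bitfield
         with a conditional store.  */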
7059 if (ignore
7060 && TREE_CODE (lhs) == COMPONENT_REF
7061 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7062 || TREE_CODE (rhs) == BIT_AND_EXPR)
7063 && TREE_OPERAND (rhs, 0) == lhs
7064 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7065 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7066 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7067 {
7068 rtx label = gen_label_rtx ();
7069
7070 do_jump (TREE_OPERAND (rhs, 1),
7071 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7072 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7073 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7074 (TREE_CODE (rhs) == BIT_IOR_EXPR
7075 ? integer_one_node
7076 : integer_zero_node)),
7077 0, 0);
7078 do_pending_stack_adjust ();
7079 emit_label (label);
7080 return const0_rtx;
7081 }
7082
7083 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7084 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7085 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7086 TYPE_NONCOPIED_PARTS (lhs_type));
7087
7088 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7089 while (noncopied_parts != 0)
7090 {
7091 expand_assignment (TREE_PURPOSE (noncopied_parts),
7092 TREE_VALUE (noncopied_parts), 0, 0);
7093 noncopied_parts = TREE_CHAIN (noncopied_parts);
7094 }
7095 return temp;
7096 }
7097
7098 case PREINCREMENT_EXPR:
7099 case PREDECREMENT_EXPR:
7100 return expand_increment (exp, 0, ignore);
7101
7102 case POSTINCREMENT_EXPR:
7103 case POSTDECREMENT_EXPR:
7104 /* Faster to treat as pre-increment if result is not used. */
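      /* E.g., a bare statement `i++;' generates the same code as
         `++i;', since nothing examines the old value.  */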
7105 return expand_increment (exp, ! ignore, ignore);
7106
7107 case ADDR_EXPR:
7108 /* If nonzero, TEMP will be set to the address of something that might
7109 be a MEM corresponding to a stack slot. */
7110 temp = 0;
7111
7112 /* Are we taking the address of a nested function? */
7113 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7114 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7115 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7116 {
7117 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7118 op0 = force_operand (op0, target);
7119 }
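      /* E.g., in GNU C,

             void f () { void g () {} void (*p) () = g; }

         the address of `g' must be the address of a trampoline
         that loads the static chain before entering `g' proper.  */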
7120 /* If we are taking the address of something erroneous, just
7121 return a zero. */
7122 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7123 return const0_rtx;
7124 else
7125 {
7126 /* We make sure to pass const0_rtx down if we came in with
7127 ignore set, to avoid running the cleanups twice. */
7128 op0 = expand_expr (TREE_OPERAND (exp, 0),
7129 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7130 (modifier == EXPAND_INITIALIZER
7131 ? modifier : EXPAND_CONST_ADDRESS));
7132
7133 /* If we are going to ignore the result, OP0 will have been set
7134 to const0_rtx, so just return it. Don't get confused and
7135 think we are taking the address of the constant. */
7136 if (ignore)
7137 return op0;
7138
7139 op0 = protect_from_queue (op0, 0);
7140
7141 /* We would like the object in memory. If it is a constant,
7142 we can have it be statically allocated into memory. For
7143 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7144 memory and store the value into it. */
7145
7146 if (CONSTANT_P (op0))
7147 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7148 op0);
7149 else if (GET_CODE (op0) == MEM)
7150 {
7151 mark_temp_addr_taken (op0);
7152 temp = XEXP (op0, 0);
7153 }
7154
7155 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7156 || GET_CODE (op0) == CONCAT)
7157 {
7158 /* If this object is in a register, it must not
7159 be BLKmode. */
7160 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7161 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7162
7163 mark_temp_addr_taken (memloc);
7164 emit_move_insn (memloc, op0);
7165 op0 = memloc;
7166 }
7167
7168 if (GET_CODE (op0) != MEM)
7169 abort ();
7170
7171 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7172 {
7173 temp = XEXP (op0, 0);
7174 #ifdef POINTERS_EXTEND_UNSIGNED
7175 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7176 && mode == ptr_mode)
7177 temp = convert_memory_address (ptr_mode, temp);
7178 #endif
7179 return temp;
7180 }
7181
7182 op0 = force_operand (XEXP (op0, 0), target);
7183 }
7184
7185 if (flag_force_addr && GET_CODE (op0) != REG)
7186 op0 = force_reg (Pmode, op0);
7187
7188 if (GET_CODE (op0) == REG
7189 && ! REG_USERVAR_P (op0))
7190 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7191
7192 /* If we might have had a temp slot, add an equivalent address
7193 for it. */
7194 if (temp != 0)
7195 update_temp_slot_address (temp, op0);
7196
7197 #ifdef POINTERS_EXTEND_UNSIGNED
7198 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7199 && mode == ptr_mode)
7200 op0 = convert_memory_address (ptr_mode, op0);
7201 #endif
7202
7203 return op0;
7204
7205 case ENTRY_VALUE_EXPR:
7206 abort ();
7207
7208 /* COMPLEX type for Extended Pascal & Fortran */
7209 case COMPLEX_EXPR:
7210 {
7211 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7212 rtx insns;
7213
7214 /* Get the rtx code of the operands. */
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7216 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7217
7218 if (! target)
7219 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7220
7221 start_sequence ();
7222
7223 /* Move the real (op0) and imaginary (op1) parts to their location. */
7224 emit_move_insn (gen_realpart (mode, target), op0);
7225 emit_move_insn (gen_imagpart (mode, target), op1);
7226
7227 insns = get_insns ();
7228 end_sequence ();
7229
7230 /* Complex construction should appear as a single unit. */
7231 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7232 each with a separate pseudo as destination.
7233 It's not correct for flow to treat them as a unit. */
7234 if (GET_CODE (target) != CONCAT)
7235 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7236 else
7237 emit_insns (insns);
7238
7239 return target;
7240 }
7241
7242 case REALPART_EXPR:
7243 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7244 return gen_realpart (mode, op0);
7245
7246 case IMAGPART_EXPR:
7247 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7248 return gen_imagpart (mode, op0);
7249
7250 case CONJ_EXPR:
7251 {
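        /* For z = a + b*i the conjugate is a - b*i, so we copy the
           real part unchanged and negate the imaginary part.  */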
7252 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7253 rtx imag_t;
7254 rtx insns;
7255
7256 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7257
7258 if (! target)
7259 target = gen_reg_rtx (mode);
7260
7261 start_sequence ();
7262
7263 /* Store the realpart and the negated imagpart to target. */
7264 emit_move_insn (gen_realpart (partmode, target),
7265 gen_realpart (partmode, op0));
7266
7267 imag_t = gen_imagpart (partmode, target);
7268 temp = expand_unop (partmode, neg_optab,
7269 gen_imagpart (partmode, op0), imag_t, 0);
7270 if (temp != imag_t)
7271 emit_move_insn (imag_t, temp);
7272
7273 insns = get_insns ();
7274 end_sequence ();
7275
7276 /* Conjugate should appear as a single unit.
7277 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7278 each with a separate pseudo as destination.
7279 It's not correct for flow to treat them as a unit. */
7280 if (GET_CODE (target) != CONCAT)
7281 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7282 else
7283 emit_insns (insns);
7284
7285 return target;
7286 }
7287
7288 case ERROR_MARK:
7289 op0 = CONST0_RTX (tmode);
7290 if (op0 != 0)
7291 return op0;
7292 return const0_rtx;
7293
7294 default:
7295 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7296 }
7297
7298 /* Here to do an ordinary binary operator, generating an instruction
7299 from the optab already placed in `this_optab'. */
7300 binop:
7301 preexpand_calls (exp);
7302 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7303 subtarget = 0;
7304 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7305 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7306 binop2:
7307 temp = expand_binop (mode, this_optab, op0, op1, target,
7308 unsignedp, OPTAB_LIB_WIDEN);
7309 if (temp == 0)
7310 abort ();
7311 return temp;
7312 }
7313
7314
7315 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7316
7317 void
7318 bc_expand_expr (exp)
7319 tree exp;
7320 {
7321 enum tree_code code;
7322 tree type, arg0;
7323 rtx r;
7324 struct binary_operator *binoptab;
7325 struct unary_operator *unoptab;
7326 struct increment_operator *incroptab;
7327 struct bc_label *lab, *lab1;
7328 enum bytecode_opcode opcode;
7329
7330
7331 code = TREE_CODE (exp);
7332
7333 switch (code)
7334 {
7335 case PARM_DECL:
7336
7337 if (DECL_RTL (exp) == 0)
7338 {
7339 error_with_decl (exp, "prior parameter's size depends on `%s'");
7340 return;
7341 }
7342
7343 bc_load_parmaddr (DECL_RTL (exp));
7344 bc_load_memory (TREE_TYPE (exp), exp);
7345
7346 return;
7347
7348 case VAR_DECL:
7349
7350 if (DECL_RTL (exp) == 0)
7351 abort ();
7352
7353 #if 0
7354 if (BYTECODE_LABEL (DECL_RTL (exp)))
7355 bc_load_externaddr (DECL_RTL (exp));
7356 else
7357 bc_load_localaddr (DECL_RTL (exp));
7358 #endif
7359 if (TREE_PUBLIC (exp))
7360 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7361 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7362 else
7363 bc_load_localaddr (DECL_RTL (exp));
7364
7365 bc_load_memory (TREE_TYPE (exp), exp);
7366 return;
7367
7368 case INTEGER_CST:
7369
7370 #ifdef DEBUG_PRINT_CODE
7371 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7372 #endif
7373 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7374 ? SImode
7375 : TYPE_MODE (TREE_TYPE (exp)))],
7376 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7377 return;
7378
7379 case REAL_CST:
7380
7381 #if 0
7382 #ifdef DEBUG_PRINT_CODE
7383 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7384 #endif
7385 /* FIX THIS: find a better way to pass real_cst's. -bson */
7386 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7387 (double) TREE_REAL_CST (exp));
7388 #else
7389 abort ();
7390 #endif
7391
7392 return;
7393
7394 case CALL_EXPR:
7395
7396 /* We build a call description vector describing the type of
7397 the return value and of the arguments; this call vector,
7398 together with a pointer to a location for the return value
7399 and the base of the argument list, is passed to the low
7400 level machine dependent call subroutine, which is responsible
7401 for putting the arguments wherever real functions expect
7402 them, as well as getting the return value back. */
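      /* Conceptually the vector is laid out as (a sketch, following
         the construction below):

             { nargs,
               return type code, return size,
               arg1 type code, arg1 size,
               ... }

         with the type codes supplied by bc_runtime_type_code.  */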
7403 {
7404 tree calldesc = 0, arg;
7405 int nargs = 0, i;
7406 rtx retval;
7407
7408 /* Push the evaluated args on the evaluation stack in reverse
7409 order. Also make an entry for each arg in the calldesc
7410 vector while we're at it. */
7411
7412 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7413
7414 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7415 {
7416 ++nargs;
7417 bc_expand_expr (TREE_VALUE (arg));
7418
7419 calldesc = tree_cons ((tree) 0,
7420 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7421 calldesc);
7422 calldesc = tree_cons ((tree) 0,
7423 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7424 calldesc);
7425 }
7426
7427 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7428
7429 /* Allocate a location for the return value and push its
7430 address on the evaluation stack. Also make an entry
7431 at the front of the calldesc for the return value type. */
7432
7433 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7434 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7435 bc_load_localaddr (retval);
7436
7437 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7438 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7439
7440 /* Prepend the argument count. */
7441 calldesc = tree_cons ((tree) 0,
7442 build_int_2 (nargs, 0),
7443 calldesc);
7444
7445 /* Push the address of the call description vector on the stack. */
7446 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7447 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7448 build_index_type (build_int_2 (nargs * 2, 0)));
7449 r = output_constant_def (calldesc);
7450 bc_load_externaddr (r);
7451
7452 /* Push the address of the function to be called. */
7453 bc_expand_expr (TREE_OPERAND (exp, 0));
7454
7455 /* Call the function, popping its address and the calldesc vector
7456 address off the evaluation stack in the process. */
7457 bc_emit_instruction (call);
7458
7459 /* Pop the arguments off the stack. */
7460 bc_adjust_stack (nargs);
7461
7462 /* Load the return value onto the stack. */
7463 bc_load_localaddr (retval);
7464 bc_load_memory (type, TREE_OPERAND (exp, 0));
7465 }
7466 return;
7467
7468 case SAVE_EXPR:
7469
7470 if (!SAVE_EXPR_RTL (exp))
7471 {
7472 /* First time around: copy to local variable */
7473 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7474 TYPE_ALIGN (TREE_TYPE(exp)));
7475 bc_expand_expr (TREE_OPERAND (exp, 0));
7476 bc_emit_instruction (duplicate);
7477
7478 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7479 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7480 }
7481 else
7482 {
7483 /* Consecutive reference: use saved copy */
7484 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7485 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7486 }
7487 return;
7488
7489 #if 0
7490 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7491 how are they handled instead? */
7492 case LET_STMT:
7493
7494 TREE_USED (exp) = 1;
7495 bc_expand_expr (STMT_BODY (exp));
7496 return;
7497 #endif
7498
7499 case NOP_EXPR:
7500 case CONVERT_EXPR:
7501
7502 bc_expand_expr (TREE_OPERAND (exp, 0));
7503 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7504 return;
7505
7506 case MODIFY_EXPR:
7507
7508 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7509 return;
7510
7511 case ADDR_EXPR:
7512
7513 bc_expand_address (TREE_OPERAND (exp, 0));
7514 return;
7515
7516 case INDIRECT_REF:
7517
7518 bc_expand_expr (TREE_OPERAND (exp, 0));
7519 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7520 return;
7521
7522 case ARRAY_REF:
7523
7524 bc_expand_expr (bc_canonicalize_array_ref (exp));
7525 return;
7526
7527 case COMPONENT_REF:
7528
7529 bc_expand_component_address (exp);
7530
7531 /* If we have a bitfield, generate a proper load. */
7532 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7533 return;
7534
7535 case COMPOUND_EXPR:
7536
7537 bc_expand_expr (TREE_OPERAND (exp, 0));
7538 bc_emit_instruction (drop);
7539 bc_expand_expr (TREE_OPERAND (exp, 1));
7540 return;
7541
7542 case COND_EXPR:
7543
7544 bc_expand_expr (TREE_OPERAND (exp, 0));
7545 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7546 lab = bc_get_bytecode_label ();
7547 bc_emit_bytecode (xjumpifnot);
7548 bc_emit_bytecode_labelref (lab);
7549
7550 #ifdef DEBUG_PRINT_CODE
7551 fputc ('\n', stderr);
7552 #endif
7553 bc_expand_expr (TREE_OPERAND (exp, 1));
7554 lab1 = bc_get_bytecode_label ();
7555 bc_emit_bytecode (jump);
7556 bc_emit_bytecode_labelref (lab1);
7557
7558 #ifdef DEBUG_PRINT_CODE
7559 fputc ('\n', stderr);
7560 #endif
7561
7562 bc_emit_bytecode_labeldef (lab);
7563 bc_expand_expr (TREE_OPERAND (exp, 2));
7564 bc_emit_bytecode_labeldef (lab1);
7565 return;
7566
7567 case TRUTH_ANDIF_EXPR:
7568
7569 opcode = xjumpifnot;
7570 goto andorif;
7571
7572 case TRUTH_ORIF_EXPR:
7573
7574 opcode = xjumpif;
7575 goto andorif;
7576
7577 case PLUS_EXPR:
7578
7579 binoptab = optab_plus_expr;
7580 goto binop;
7581
7582 case MINUS_EXPR:
7583
7584 binoptab = optab_minus_expr;
7585 goto binop;
7586
7587 case MULT_EXPR:
7588
7589 binoptab = optab_mult_expr;
7590 goto binop;
7591
7592 case TRUNC_DIV_EXPR:
7593 case FLOOR_DIV_EXPR:
7594 case CEIL_DIV_EXPR:
7595 case ROUND_DIV_EXPR:
7596 case EXACT_DIV_EXPR:
7597
7598 binoptab = optab_trunc_div_expr;
7599 goto binop;
7600
7601 case TRUNC_MOD_EXPR:
7602 case FLOOR_MOD_EXPR:
7603 case CEIL_MOD_EXPR:
7604 case ROUND_MOD_EXPR:
7605
7606 binoptab = optab_trunc_mod_expr;
7607 goto binop;
7608
7609 case FIX_ROUND_EXPR:
7610 case FIX_FLOOR_EXPR:
7611 case FIX_CEIL_EXPR:
7612 abort (); /* Not used for C. */
7613
7614 case FIX_TRUNC_EXPR:
7615 case FLOAT_EXPR:
7616 case MAX_EXPR:
7617 case MIN_EXPR:
7618 case FFS_EXPR:
7619 case LROTATE_EXPR:
7620 case RROTATE_EXPR:
7621 abort (); /* FIXME */
7622
7623 case RDIV_EXPR:
7624
7625 binoptab = optab_rdiv_expr;
7626 goto binop;
7627
7628 case BIT_AND_EXPR:
7629
7630 binoptab = optab_bit_and_expr;
7631 goto binop;
7632
7633 case BIT_IOR_EXPR:
7634
7635 binoptab = optab_bit_ior_expr;
7636 goto binop;
7637
7638 case BIT_XOR_EXPR:
7639
7640 binoptab = optab_bit_xor_expr;
7641 goto binop;
7642
7643 case LSHIFT_EXPR:
7644
7645 binoptab = optab_lshift_expr;
7646 goto binop;
7647
7648 case RSHIFT_EXPR:
7649
7650 binoptab = optab_rshift_expr;
7651 goto binop;
7652
7653 case TRUTH_AND_EXPR:
7654
7655 binoptab = optab_truth_and_expr;
7656 goto binop;
7657
7658 case TRUTH_OR_EXPR:
7659
7660 binoptab = optab_truth_or_expr;
7661 goto binop;
7662
7663 case LT_EXPR:
7664
7665 binoptab = optab_lt_expr;
7666 goto binop;
7667
7668 case LE_EXPR:
7669
7670 binoptab = optab_le_expr;
7671 goto binop;
7672
7673 case GE_EXPR:
7674
7675 binoptab = optab_ge_expr;
7676 goto binop;
7677
7678 case GT_EXPR:
7679
7680 binoptab = optab_gt_expr;
7681 goto binop;
7682
7683 case EQ_EXPR:
7684
7685 binoptab = optab_eq_expr;
7686 goto binop;
7687
7688 case NE_EXPR:
7689
7690 binoptab = optab_ne_expr;
7691 goto binop;
7692
7693 case NEGATE_EXPR:
7694
7695 unoptab = optab_negate_expr;
7696 goto unop;
7697
7698 case BIT_NOT_EXPR:
7699
7700 unoptab = optab_bit_not_expr;
7701 goto unop;
7702
7703 case TRUTH_NOT_EXPR:
7704
7705 unoptab = optab_truth_not_expr;
7706 goto unop;
7707
7708 case PREDECREMENT_EXPR:
7709
7710 incroptab = optab_predecrement_expr;
7711 goto increment;
7712
7713 case PREINCREMENT_EXPR:
7714
7715 incroptab = optab_preincrement_expr;
7716 goto increment;
7717
7718 case POSTDECREMENT_EXPR:
7719
7720 incroptab = optab_postdecrement_expr;
7721 goto increment;
7722
7723 case POSTINCREMENT_EXPR:
7724
7725 incroptab = optab_postincrement_expr;
7726 goto increment;
7727
7728 case CONSTRUCTOR:
7729
7730 bc_expand_constructor (exp);
7731 return;
7732
7733 case ERROR_MARK:
7734 case RTL_EXPR:
7735
7736 return;
7737
7738 case BIND_EXPR:
7739 {
7740 tree vars = TREE_OPERAND (exp, 0);
7741 int vars_need_expansion = 0;
7742
7743 /* Need to open a binding contour here because
7744 if there are any cleanups they must be contained here. */
7745 expand_start_bindings (0);
7746
7747 /* Mark the corresponding BLOCK for output. */
7748 if (TREE_OPERAND (exp, 2) != 0)
7749 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7750
7751 /* If VARS have not yet been expanded, expand them now. */
7752 while (vars)
7753 {
7754 if (DECL_RTL (vars) == 0)
7755 {
7756 vars_need_expansion = 1;
7757 expand_decl (vars);
7758 }
7759 expand_decl_init (vars);
7760 vars = TREE_CHAIN (vars);
7761 }
7762
7763 bc_expand_expr (TREE_OPERAND (exp, 1));
7764
7765 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7766
7767 return;
7768 }
7769 }
7770
7771 abort ();
7772
7773 binop:
7774
7775 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7776 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7777 return;
7778
7779
7780 unop:
7781
7782 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7783 return;
7784
7785
7786 andorif:
7787
7788 bc_expand_expr (TREE_OPERAND (exp, 0));
7789 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7790 lab = bc_get_bytecode_label ();
7791
7792 bc_emit_instruction (duplicate);
7793 bc_emit_bytecode (opcode);
7794 bc_emit_bytecode_labelref (lab);
7795
7796 #ifdef DEBUG_PRINT_CODE
7797 fputc ('\n', stderr);
7798 #endif
7799
7800 bc_emit_instruction (drop);
7801
7802 bc_expand_expr (TREE_OPERAND (exp, 1));
7803 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7804 bc_emit_bytecode_labeldef (lab);
7805 return;
7806
7807
7808 increment:
7809
7810 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7811
7812 /* Push the quantum. */
7813 bc_expand_expr (TREE_OPERAND (exp, 1));
7814
7815 /* Convert it to the lvalue's type. */
7816 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7817
7818 /* Push the address of the lvalue */
7819 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7820
7821 /* Perform actual increment */
7822 bc_expand_increment (incroptab, type);
7823 return;
7824 }
7825 \f
7826 /* Return the alignment in bits of EXP, a pointer-valued expression.
7827 But don't return more than MAX_ALIGN no matter what.
7828 The alignment returned is, by default, the alignment of the thing that
7829 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7830
7831 Otherwise, look at the expression to see if we can do better, i.e., if the
7832 expression is actually pointing at an object whose alignment is tighter. */
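/* For instance, for an argument written as `(char *) &d', where `d'
   is a double, the type `char *' only promises byte alignment, but
   stripping the cast and examining the ADDR_EXPR recovers the full
   alignment of `d'.  */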
7833
7834 static int
7835 get_pointer_alignment (exp, max_align)
7836 tree exp;
7837 unsigned max_align;
7838 {
7839 unsigned align, inner;
7840
7841 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7842 return 0;
7843
7844 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7845 align = MIN (align, max_align);
7846
7847 while (1)
7848 {
7849 switch (TREE_CODE (exp))
7850 {
7851 case NOP_EXPR:
7852 case CONVERT_EXPR:
7853 case NON_LVALUE_EXPR:
7854 exp = TREE_OPERAND (exp, 0);
7855 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7856 return align;
7857 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7858 align = MIN (inner, max_align);
7859 break;
7860
7861 case PLUS_EXPR:
7862 /* If sum of pointer + int, restrict our maximum alignment to that
7863 imposed by the integer. If not, we can't do any better than
7864 ALIGN. */
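        /* E.g., adding the constant 2 to a pointer leaves at most
           16-bit (2-byte) alignment, however aligned the base was.  */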
7865 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7866 return align;
7867
7868 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7869 & (max_align - 1))
7870 != 0)
7871 max_align >>= 1;
7872
7873 exp = TREE_OPERAND (exp, 0);
7874 break;
7875
7876 case ADDR_EXPR:
7877 /* See what we are pointing at and look at its alignment. */
7878 exp = TREE_OPERAND (exp, 0);
7879 if (TREE_CODE (exp) == FUNCTION_DECL)
7880 align = FUNCTION_BOUNDARY;
7881 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7882 align = DECL_ALIGN (exp);
7883 #ifdef CONSTANT_ALIGNMENT
7884 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7885 align = CONSTANT_ALIGNMENT (exp, align);
7886 #endif
7887 return MIN (align, max_align);
7888
7889 default:
7890 return align;
7891 }
7892 }
7893 }
7894 \f
7895 /* Return the tree node and offset if a given argument corresponds to
7896 a string constant. */
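/* E.g., for an argument that folds to `"hello" + 2' this returns the
   STRING_CST for "hello" and sets *PTR_OFFSET to 2; for a plain
   `"hello"' the offset is integer_zero_node.  */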
7897
7898 static tree
7899 string_constant (arg, ptr_offset)
7900 tree arg;
7901 tree *ptr_offset;
7902 {
7903 STRIP_NOPS (arg);
7904
7905 if (TREE_CODE (arg) == ADDR_EXPR
7906 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7907 {
7908 *ptr_offset = integer_zero_node;
7909 return TREE_OPERAND (arg, 0);
7910 }
7911 else if (TREE_CODE (arg) == PLUS_EXPR)
7912 {
7913 tree arg0 = TREE_OPERAND (arg, 0);
7914 tree arg1 = TREE_OPERAND (arg, 1);
7915
7916 STRIP_NOPS (arg0);
7917 STRIP_NOPS (arg1);
7918
7919 if (TREE_CODE (arg0) == ADDR_EXPR
7920 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7921 {
7922 *ptr_offset = arg1;
7923 return TREE_OPERAND (arg0, 0);
7924 }
7925 else if (TREE_CODE (arg1) == ADDR_EXPR
7926 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7927 {
7928 *ptr_offset = arg0;
7929 return TREE_OPERAND (arg1, 0);
7930 }
7931 }
7932
7933 return 0;
7934 }
7935
7936 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7937 answer, because the array could contain a zero byte in the middle;
7938 TREE_STRING_LENGTH is the size of the character array, not the string.
7939
7940 Unfortunately, string_constant can't access the values of const char
7941 arrays with initializers, so neither can we do so here. */
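/* E.g., c_strlen of `"foo\0bar"' yields 3, and of `"foo\0bar" + 4'
   yields 3 as well (the length of "bar"); with a variable offset into
   a string containing an embedded null we give up and return 0.  */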
7942
7943 static tree
7944 c_strlen (src)
7945 tree src;
7946 {
7947 tree offset_node;
7948 int offset, max;
7949 char *ptr;
7950
7951 src = string_constant (src, &offset_node);
7952 if (src == 0)
7953 return 0;
7954 max = TREE_STRING_LENGTH (src);
7955 ptr = TREE_STRING_POINTER (src);
7956 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7957 {
7958 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7959 compute the offset to the following null if we don't know where to
7960 start searching for it. */
7961 int i;
7962 for (i = 0; i < max; i++)
7963 if (ptr[i] == 0)
7964 return 0;
7965 /* We don't know the starting offset, but we do know that the string
7966 has no internal zero bytes. We can assume that the offset falls
7967 within the bounds of the string; otherwise, the programmer deserves
7968 what he gets. Subtract the offset from the length of the string,
7969 and return that. */
7970 /* This would perhaps not be valid if we were dealing with named
7971 arrays in addition to literal string constants. */
7972 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7973 }
7974
7975 /* We have a known offset into the string. Start searching there for
7976 a null character. */
7977 if (offset_node == 0)
7978 offset = 0;
7979 else
7980 {
7981 /* Did we get a long long offset? If so, punt. */
7982 if (TREE_INT_CST_HIGH (offset_node) != 0)
7983 return 0;
7984 offset = TREE_INT_CST_LOW (offset_node);
7985 }
7986 /* If the offset is known to be out of bounds, warn, and call strlen at
7987 runtime. */
7988 if (offset < 0 || offset > max)
7989 {
7990 warning ("offset outside bounds of constant string");
7991 return 0;
7992 }
7993 /* Use strlen to search for the first zero byte. Since any strings
7994 constructed with build_string will have nulls appended, we win even
7995 if we get handed something like (char[4])"abcd".
7996
7997 Since OFFSET is our starting index into the string, no further
7998 calculation is needed. */
7999 return size_int (strlen (ptr + offset));
8000 }
8001
8002 rtx
8003 expand_builtin_return_addr (fndecl_code, count, tem)
8004 enum built_in_function fndecl_code;
8005 int count;
8006 rtx tem;
8007 {
8008 int i;
8009
8010 /* Some machines need special handling before we can access
8011 arbitrary frames. For example, on the sparc, we must first flush
8012 all register windows to the stack. */
8013 #ifdef SETUP_FRAME_ADDRESSES
8014 SETUP_FRAME_ADDRESSES ();
8015 #endif
8016
8017 /* On the sparc, the return address is not in the frame, it is in a
8018 register. There is no way to access it off of the current frame
8019 pointer, but it can be accessed off the previous frame pointer by
8020 reading the value from the register window save area. */
8021 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8022 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8023 count--;
8024 #endif
8025
8026 /* Scan back COUNT frames to the specified frame. */
8027 for (i = 0; i < count; i++)
8028 {
8029 /* Assume the dynamic chain pointer is in the word that the
8030 frame address points to, unless otherwise specified. */
8031 #ifdef DYNAMIC_CHAIN_ADDRESS
8032 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8033 #endif
8034 tem = memory_address (Pmode, tem);
8035 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8036 }
8037
8038 /* For __builtin_frame_address, return what we've got. */
8039 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8040 return tem;
8041
8042 /* For __builtin_return_address, get the return address from that
8043 frame. */
8044 #ifdef RETURN_ADDR_RTX
8045 tem = RETURN_ADDR_RTX (count, tem);
8046 #else
8047 tem = memory_address (Pmode,
8048 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8049 tem = gen_rtx (MEM, Pmode, tem);
8050 #endif
8051 return tem;
8052 }
8053 \f
8054 /* Expand an expression EXP that calls a built-in function,
8055 with result going to TARGET if that's convenient
8056 (and in mode MODE if that's convenient).
8057 SUBTARGET may be used as the target for computing one of EXP's operands.
8058 IGNORE is nonzero if the value is to be ignored. */
8059
8060 #define CALLED_AS_BUILT_IN(NODE) \
8061 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8062
8063 static rtx
8064 expand_builtin (exp, target, subtarget, mode, ignore)
8065 tree exp;
8066 rtx target;
8067 rtx subtarget;
8068 enum machine_mode mode;
8069 int ignore;
8070 {
8071 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8072 tree arglist = TREE_OPERAND (exp, 1);
8073 rtx op0;
8074 rtx lab1, insns;
8075 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8076 optab builtin_optab;
8077
8078 switch (DECL_FUNCTION_CODE (fndecl))
8079 {
8080 case BUILT_IN_ABS:
8081 case BUILT_IN_LABS:
8082 case BUILT_IN_FABS:
8083 /* build_function_call changes these into ABS_EXPR. */
8084 abort ();
8085
8086 case BUILT_IN_SIN:
8087 case BUILT_IN_COS:
8088 /* Treat these like sqrt, but only if the user asks for them. */
8089 if (! flag_fast_math)
8090 break;
8091 case BUILT_IN_FSQRT:
8092 /* If not optimizing, call the library function. */
8093 if (! optimize)
8094 break;
8095
8096 if (arglist == 0
8097 /* Arg could be wrong type if user redeclared this fcn wrong. */
8098 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8099 break;
8100
8101 /* Stabilize and compute the argument. */
8102 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8103 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8104 {
8105 exp = copy_node (exp);
8106 arglist = copy_node (arglist);
8107 TREE_OPERAND (exp, 1) = arglist;
8108 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8109 }
8110 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8111
8112 /* Make a suitable register to place result in. */
8113 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8114
8115 emit_queue ();
8116 start_sequence ();
8117
8118 switch (DECL_FUNCTION_CODE (fndecl))
8119 {
8120 case BUILT_IN_SIN:
8121 builtin_optab = sin_optab; break;
8122 case BUILT_IN_COS:
8123 builtin_optab = cos_optab; break;
8124 case BUILT_IN_FSQRT:
8125 builtin_optab = sqrt_optab; break;
8126 default:
8127 abort ();
8128 }
8129
8130 /* Compute into TARGET.
8131 Set TARGET to wherever the result comes back. */
8132 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8133 builtin_optab, op0, target, 0);
8134
8135 /* If we were unable to expand via the builtin, stop the
8136 sequence (without outputting the insns) and break, causing
8137 a call to the library function. */
8138 if (target == 0)
8139 {
8140 end_sequence ();
8141 break;
8142 }
8143
8144 /* Check the results by default. But if flag_fast_math is turned on,
8145 then assume sqrt will always be called with valid arguments. */
8146
8147 if (! flag_fast_math)
8148 {
8149 /* Don't define the builtin FP instructions
8150 if your machine is not IEEE. */
8151 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8152 abort ();
8153
8154 lab1 = gen_label_rtx ();
8155
8156 /* Test the result; if it is NaN, set errno=EDOM because
8157 the argument was not in the domain. */
8158 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8159 emit_jump_insn (gen_beq (lab1));
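      /* Only a NaN compares unequal to itself, so falling through
         the branch above means the result was a NaN.  */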
8160
8161 #ifdef TARGET_EDOM
8162 {
8163 #ifdef GEN_ERRNO_RTX
8164 rtx errno_rtx = GEN_ERRNO_RTX;
8165 #else
8166 rtx errno_rtx
8167 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8168 #endif
8169
8170 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8171 }
8172 #else
8173 /* We can't set errno=EDOM directly; let the library call do it.
8174 Pop the arguments right away in case the call gets deleted. */
8175 NO_DEFER_POP;
8176 expand_call (exp, target, 0);
8177 OK_DEFER_POP;
8178 #endif
8179
8180 emit_label (lab1);
8181 }
8182
8183 /* Output the entire sequence. */
8184 insns = get_insns ();
8185 end_sequence ();
8186 emit_insns (insns);
8187
8188 return target;
8189
8190 /* __builtin_apply_args returns a block of memory allocated on
8191 the stack into which are stored the arg pointer, structure
8192 value address, static chain, and all the registers that might
8193 possibly be used in performing a function call. The code is
8194 moved to the start of the function so the incoming values are
8195 saved. */
8196 case BUILT_IN_APPLY_ARGS:
8197 /* Don't do __builtin_apply_args more than once in a function.
8198 Save the result of the first call and reuse it. */
8199 if (apply_args_value != 0)
8200 return apply_args_value;
8201 {
8202 /* When this function is called, it means that registers must be
8203 saved on entry to this function. So we migrate the
8204 call to the first insn of this function. */
8205 rtx temp;
8206 rtx seq;
8207
8208 start_sequence ();
8209 temp = expand_builtin_apply_args ();
8210 seq = get_insns ();
8211 end_sequence ();
8212
8213 apply_args_value = temp;
8214
8215 /* Put the sequence after the NOTE that starts the function.
8216 If this is inside a SEQUENCE, make the outer-level insn
8217 chain current, so the code is placed at the start of the
8218 function. */
8219 push_topmost_sequence ();
8220 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8221 pop_topmost_sequence ();
8222 return temp;
8223 }
8224
8225 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8226 FUNCTION with a copy of the parameters described by
8227 ARGUMENTS, and ARGSIZE. It returns a block of memory
8228 allocated on the stack into which are stored all the registers
8229 that might possibly be used for returning the result of a
8230 function. ARGUMENTS is the value returned by
8231 __builtin_apply_args. ARGSIZE is the number of bytes of
8232 arguments that must be copied. ??? How should this value be
8233 computed? We'll also need a safe worst case value for varargs
8234 functions. */
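      /* A typical use forwards a call (a sketch, not from this file):

             void *args = __builtin_apply_args ();
             __builtin_return (__builtin_apply (f, args, 64));

         where 64 is a worst-case guess at the argument block size.  */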
8235 case BUILT_IN_APPLY:
8236 if (arglist == 0
8237 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8238 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8239 || TREE_CHAIN (arglist) == 0
8240 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8241 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8242 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8243 return const0_rtx;
8244 else
8245 {
8246 int i;
8247 tree t;
8248 rtx ops[3];
8249
8250 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8251 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8252
8253 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8254 }
8255
8256 /* __builtin_return (RESULT) causes the function to return the
8257 value described by RESULT. RESULT is address of the block of
8258 memory returned by __builtin_apply. */
8259 case BUILT_IN_RETURN:
8260 if (arglist
8261 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8262 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8263 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8264 NULL_RTX, VOIDmode, 0));
8265 return const0_rtx;
8266
8267 case BUILT_IN_SAVEREGS:
8268 /* Don't do __builtin_saveregs more than once in a function.
8269 Save the result of the first call and reuse it. */
8270 if (saveregs_value != 0)
8271 return saveregs_value;
8272 {
8273 /* When this function is called, it means that registers must be
8274 saved on entry to this function. So we migrate the
8275 call to the first insn of this function. */
8276 rtx temp;
8277 rtx seq;
8278
8279 /* Now really call the function. `expand_call' does not call
8280 expand_builtin, so there is no danger of infinite recursion here. */
8281 start_sequence ();
8282
8283 #ifdef EXPAND_BUILTIN_SAVEREGS
8284 /* Do whatever the machine needs done in this case. */
8285 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8286 #else
8287 /* The register where the function returns its value
8288 is likely to have something else in it, such as an argument.
8289 So preserve that register around the call. */
8290
8291 if (value_mode != VOIDmode)
8292 {
8293 rtx valreg = hard_libcall_value (value_mode);
8294 rtx saved_valreg = gen_reg_rtx (value_mode);
8295
8296 emit_move_insn (saved_valreg, valreg);
8297 temp = expand_call (exp, target, ignore);
8298 emit_move_insn (valreg, saved_valreg);
8299 }
8300 else
8301 /* Generate the call, putting the value in a pseudo. */
8302 temp = expand_call (exp, target, ignore);
8303 #endif
8304
8305 seq = get_insns ();
8306 end_sequence ();
8307
8308 saveregs_value = temp;
8309
8310 /* Put the sequence after the NOTE that starts the function.
8311 If this is inside a SEQUENCE, make the outer-level insn
8312 chain current, so the code is placed at the start of the
8313 function. */
8314 push_topmost_sequence ();
8315 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8316 pop_topmost_sequence ();
8317 return temp;
8318 }
8319
8320 /* __builtin_args_info (N) returns word N of the arg space info
8321 for the current function. The number and meanings of words
8322 are controlled by the definition of CUMULATIVE_ARGS. */
8323 case BUILT_IN_ARGS_INFO:
8324 {
8325 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8326 int i;
8327 int *word_ptr = (int *) &current_function_args_info;
8328 tree type, elts, result;
8329
8330 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8331 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8332 __FILE__, __LINE__);
8333
8334 if (arglist != 0)
8335 {
8336 tree arg = TREE_VALUE (arglist);
8337 if (TREE_CODE (arg) != INTEGER_CST)
8338 error ("argument of `__builtin_args_info' must be constant");
8339 else
8340 {
8341 int wordnum = TREE_INT_CST_LOW (arg);
8342
8343 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8344 error ("argument of `__builtin_args_info' out of range");
8345 else
8346 return GEN_INT (word_ptr[wordnum]);
8347 }
8348 }
8349 else
8350 error ("missing argument in `__builtin_args_info'");
8351
8352 return const0_rtx;
8353
8354 #if 0
8355 for (i = 0; i < nwords; i++)
8356 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8357
8358 type = build_array_type (integer_type_node,
8359 build_index_type (build_int_2 (nwords, 0)));
8360 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8361 TREE_CONSTANT (result) = 1;
8362 TREE_STATIC (result) = 1;
8363 result = build (INDIRECT_REF, build_pointer_type (type), result);
8364 TREE_CONSTANT (result) = 1;
8365 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8366 #endif
8367 }
8368
8369 /* Return the address of the first anonymous stack arg. */
8370 case BUILT_IN_NEXT_ARG:
8371 {
8372 tree fntype = TREE_TYPE (current_function_decl);
8373
8374 if ((TYPE_ARG_TYPES (fntype) == 0
8375 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8376 == void_type_node))
8377 && ! current_function_varargs)
8378 {
8379 error ("`va_start' used in function with fixed args");
8380 return const0_rtx;
8381 }
8382
8383 if (arglist)
8384 {
8385 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8386 tree arg = TREE_VALUE (arglist);
8387
8388 /* Strip off all nops for the sake of the comparison. This
8389 is not quite the same as STRIP_NOPS. It does more.
8390 We must also strip off INDIRECT_REF for C++ reference
8391 parameters. */
8392 while (TREE_CODE (arg) == NOP_EXPR
8393 || TREE_CODE (arg) == CONVERT_EXPR
8394 || TREE_CODE (arg) == NON_LVALUE_EXPR
8395 || TREE_CODE (arg) == INDIRECT_REF)
8396 arg = TREE_OPERAND (arg, 0);
8397 if (arg != last_parm)
8398 warning ("second parameter of `va_start' not last named argument");
8399 }
8400 else if (! current_function_varargs)
8401 /* Evidently an out-of-date version of <stdarg.h>; can't validate
8402 va_start's second argument, but can still work as intended. */
8403 warning ("`__builtin_next_arg' called without an argument");
8404 }
8405
8406 return expand_binop (Pmode, add_optab,
8407 current_function_internal_arg_pointer,
8408 current_function_arg_offset_rtx,
8409 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8410
8411 case BUILT_IN_CLASSIFY_TYPE:
8412 if (arglist != 0)
8413 {
8414 tree type = TREE_TYPE (TREE_VALUE (arglist));
8415 enum tree_code code = TREE_CODE (type);
8416 if (code == VOID_TYPE)
8417 return GEN_INT (void_type_class);
8418 if (code == INTEGER_TYPE)
8419 return GEN_INT (integer_type_class);
8420 if (code == CHAR_TYPE)
8421 return GEN_INT (char_type_class);
8422 if (code == ENUMERAL_TYPE)
8423 return GEN_INT (enumeral_type_class);
8424 if (code == BOOLEAN_TYPE)
8425 return GEN_INT (boolean_type_class);
8426 if (code == POINTER_TYPE)
8427 return GEN_INT (pointer_type_class);
8428 if (code == REFERENCE_TYPE)
8429 return GEN_INT (reference_type_class);
8430 if (code == OFFSET_TYPE)
8431 return GEN_INT (offset_type_class);
8432 if (code == REAL_TYPE)
8433 return GEN_INT (real_type_class);
8434 if (code == COMPLEX_TYPE)
8435 return GEN_INT (complex_type_class);
8436 if (code == FUNCTION_TYPE)
8437 return GEN_INT (function_type_class);
8438 if (code == METHOD_TYPE)
8439 return GEN_INT (method_type_class);
8440 if (code == RECORD_TYPE)
8441 return GEN_INT (record_type_class);
8442 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8443 return GEN_INT (union_type_class);
8444 if (code == ARRAY_TYPE)
8445 {
8446 if (TYPE_STRING_FLAG (type))
8447 return GEN_INT (string_type_class);
8448 else
8449 return GEN_INT (array_type_class);
8450 }
8451 if (code == SET_TYPE)
8452 return GEN_INT (set_type_class);
8453 if (code == FILE_TYPE)
8454 return GEN_INT (file_type_class);
8455 if (code == LANG_TYPE)
8456 return GEN_INT (lang_type_class);
8457 }
8458 return GEN_INT (no_type_class);
8459
8460 case BUILT_IN_CONSTANT_P:
8461 if (arglist == 0)
8462 return const0_rtx;
8463 else
8464 {
8465 tree arg = TREE_VALUE (arglist);
8466
8467 STRIP_NOPS (arg);
8468 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8469 || (TREE_CODE (arg) == ADDR_EXPR
8470 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8471 ? const1_rtx : const0_rtx);
8472 }
8473
8474 case BUILT_IN_FRAME_ADDRESS:
8475 /* The argument must be a nonnegative integer constant.
8476 It counts the number of frames to scan up the stack.
8477 The value is the address of that frame. */
8478 case BUILT_IN_RETURN_ADDRESS:
8479 /* The argument must be a nonnegative integer constant.
8480 It counts the number of frames to scan up the stack.
8481 The value is the return address saved in that frame. */
8482 if (arglist == 0)
8483 /* Warning about missing arg was already issued. */
8484 return const0_rtx;
8485 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8486 {
8487 error ("invalid arg to `__builtin_return_address'");
8488 return const0_rtx;
8489 }
8490 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8491 {
8492 error ("invalid arg to `__builtin_return_address'");
8493 return const0_rtx;
8494 }
8495 else
8496 {
8497 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8498 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8499 hard_frame_pointer_rtx);
8500
8501 /* For __builtin_frame_address, return what we've got. */
8502 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8503 return tem;
8504
8505 if (GET_CODE (tem) != REG)
8506 tem = copy_to_reg (tem);
8507 return tem;
8508 }
8509
8510 case BUILT_IN_ALLOCA:
8511 if (arglist == 0
8512 /* Arg could be non-integer if user redeclared this fcn wrong. */
8513 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8514 break;
8515
8516 /* Compute the argument. */
8517 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8518
8519 /* Allocate the desired space. */
8520 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8521
8522 case BUILT_IN_FFS:
8523 /* If not optimizing, call the library function. */
8524 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8525 break;
8526
8527 if (arglist == 0
8528 /* Arg could be non-integer if user redeclared this fcn wrong. */
8529 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8530 break;
8531
8532 /* Compute the argument. */
8533 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8534 /* Compute ffs, into TARGET if possible.
8535 Set TARGET to wherever the result comes back. */
8536 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8537 ffs_optab, op0, target, 1);
8538 if (target == 0)
8539 abort ();
8540 return target;
8541
8542 case BUILT_IN_STRLEN:
8543 /* If not optimizing, call the library function. */
8544 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8545 break;
8546
8547 if (arglist == 0
8548 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8549 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8550 break;
8551 else
8552 {
8553 tree src = TREE_VALUE (arglist);
8554 tree len = c_strlen (src);
8555
8556 int align
8557 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8558
8559 rtx result, src_rtx, char_rtx;
8560 enum machine_mode insn_mode = value_mode, char_mode;
8561 enum insn_code icode;
8562
8563 /* If the length is known, just return it. */
8564 if (len != 0)
8565 return expand_expr (len, target, mode, 0);
8566
8567 /* If SRC is not a pointer type, don't do this operation inline. */
8568 if (align == 0)
8569 break;
8570
8571 /* Call a function if we can't compute strlen in the right mode. */
8572
8573 while (insn_mode != VOIDmode)
8574 {
8575 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8576 if (icode != CODE_FOR_nothing)
8577 break;
8578
8579 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8580 }
8581 if (insn_mode == VOIDmode)
8582 break;
8583
8584 /* Make a place to write the result of the instruction. */
8585 result = target;
8586 if (! (result != 0
8587 && GET_CODE (result) == REG
8588 && GET_MODE (result) == insn_mode
8589 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8590 result = gen_reg_rtx (insn_mode);
8591
8592 /* Make sure the operands are acceptable to the predicates. */
8593
8594 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8595 result = gen_reg_rtx (insn_mode);
8596
8597 src_rtx = memory_address (BLKmode,
8598 expand_expr (src, NULL_RTX, ptr_mode,
8599 EXPAND_NORMAL));
8600 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8601 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8602
8603 char_rtx = const0_rtx;
8604 char_mode = insn_operand_mode[(int)icode][2];
8605 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8606 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8607
8608 emit_insn (GEN_FCN (icode) (result,
8609 gen_rtx (MEM, BLKmode, src_rtx),
8610 char_rtx, GEN_INT (align)));
8611
8612 /* Return the value in the proper mode for this function. */
8613 if (GET_MODE (result) == value_mode)
8614 return result;
8615 else if (target != 0)
8616 {
8617 convert_move (target, result, 0);
8618 return target;
8619 }
8620 else
8621 return convert_to_mode (value_mode, result, 0);
8622 }
8623
8624 case BUILT_IN_STRCPY:
8625 /* If not optimizing, call the library function. */
8626 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8627 break;
8628
8629 if (arglist == 0
8630 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8631 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8632 || TREE_CHAIN (arglist) == 0
8633 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8634 break;
8635 else
8636 {
8637 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8638
8639 if (len == 0)
8640 break;
8641
8642 len = size_binop (PLUS_EXPR, len, integer_one_node);
8643
8644 chainon (arglist, build_tree_list (NULL_TREE, len));
8645 }
8646
8647 /* Falls through. */
8648 case BUILT_IN_MEMCPY:
8649 /* If not optimizing, call the library function. */
8650 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8651 break;
8652
8653 if (arglist == 0
8654 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8655 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8656 || TREE_CHAIN (arglist) == 0
8657 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8658 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8659 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8660 break;
8661 else
8662 {
8663 tree dest = TREE_VALUE (arglist);
8664 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8665 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8666 tree type;
8667
8668 int src_align
8669 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8670 int dest_align
8671 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8672 rtx dest_rtx, dest_mem, src_mem;
8673
8674 /* If either SRC or DEST is not a pointer type, don't do
8675 this operation in-line. */
8676 if (src_align == 0 || dest_align == 0)
8677 {
8678 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8679 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8680 break;
8681 }
8682
8683 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8684 dest_mem = gen_rtx (MEM, BLKmode,
8685 memory_address (BLKmode, dest_rtx));
8686 /* There could be a void* cast on top of the object. */
8687 while (TREE_CODE (dest) == NOP_EXPR)
8688 dest = TREE_OPERAND (dest, 0);
8689 type = TREE_TYPE (TREE_TYPE (dest));
8690 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8691 src_mem = gen_rtx (MEM, BLKmode,
8692 memory_address (BLKmode,
8693 expand_expr (src, NULL_RTX,
8694 ptr_mode,
8695 EXPAND_SUM)));
8696 /* There could be a void* cast on top of the object. */
8697 while (TREE_CODE (src) == NOP_EXPR)
8698 src = TREE_OPERAND (src, 0);
8699 type = TREE_TYPE (TREE_TYPE (src));
8700 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8701
8702 /* Copy word part most expediently. */
8703 emit_block_move (dest_mem, src_mem,
8704 expand_expr (len, NULL_RTX, VOIDmode, 0),
8705 MIN (src_align, dest_align));
8706 return force_operand (dest_rtx, NULL_RTX);
8707 }
8708
8709 case BUILT_IN_MEMSET:
8710 /* If not optimizing, call the library function. */
8711 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8712 break;
8713
8714 if (arglist == 0
8715 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8716 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8717 || TREE_CHAIN (arglist) == 0
8718 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8719 != INTEGER_TYPE)
8720 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8721 || (INTEGER_CST
8722 != (TREE_CODE (TREE_TYPE
8723 (TREE_VALUE
8724 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8725 break;
8726 else
8727 {
8728 tree dest = TREE_VALUE (arglist);
8729 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8730 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8731 tree type;
8732
8733 int dest_align
8734 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8735 rtx dest_rtx, dest_mem;
8736
8737 /* If DEST is not a pointer type, don't do this
8738 operation in-line. */
8739 if (dest_align == 0)
8740 break;
8741
8742 /* If VAL is not 0, don't do this operation in-line. */
8743 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8744 break;
8745
8746 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8747 dest_mem = gen_rtx (MEM, BLKmode,
8748 memory_address (BLKmode, dest_rtx));
8749 /* There could be a void* cast on top of the object. */
8750 while (TREE_CODE (dest) == NOP_EXPR)
8751 dest = TREE_OPERAND (dest, 0);
8752 type = TREE_TYPE (TREE_TYPE (dest));
8753 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8754
8755 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8756 dest_align);
8757
8758 return force_operand (dest_rtx, NULL_RTX);
8759 }
8760
8761 /* These comparison functions need an instruction that returns an actual
8762 index. An ordinary compare that just sets the condition codes
8763 is not enough. */
8764 #ifdef HAVE_cmpstrsi
8765 case BUILT_IN_STRCMP:
8766 /* If not optimizing, call the library function. */
8767 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8768 break;
8769
8770 if (arglist == 0
8771 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8772 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8773 || TREE_CHAIN (arglist) == 0
8774 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8775 break;
8776 else if (!HAVE_cmpstrsi)
8777 break;
8778 {
8779 tree arg1 = TREE_VALUE (arglist);
8780 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8781 tree offset;
8782 tree len, len2;
8783
8784 len = c_strlen (arg1);
8785 if (len)
8786 len = size_binop (PLUS_EXPR, integer_one_node, len);
8787 len2 = c_strlen (arg2);
8788 if (len2)
8789 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8790
8791 /* If we don't have a constant length for the first, use the length
8792 of the second, if we know it. We don't require a constant for
8793 this case; some cost analysis could be done if both are available
8794 but neither is constant. For now, assume they're equally cheap.
8795
8796 If both strings have constant lengths, use the smaller. This
8797 could arise if optimization results in strcpy being called with
8798 two fixed strings, or if the code was machine-generated. We should
8799 add some code to the `memcmp' handler below to deal with such
8800 situations, someday. */
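/* E.g., for strcmp ("abc", s) the constant operand bounds the comparison
   at 4 bytes, counting the terminating null.  */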
8801 if (!len || TREE_CODE (len) != INTEGER_CST)
8802 {
8803 if (len2)
8804 len = len2;
8805 else if (len == 0)
8806 break;
8807 }
8808 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8809 {
8810 if (tree_int_cst_lt (len2, len))
8811 len = len2;
8812 }
8813
8814 chainon (arglist, build_tree_list (NULL_TREE, len));
8815 }
8816
8817 /* Falls through into the memcmp case below. */
8818 case BUILT_IN_MEMCMP:
8819 /* If not optimizing, call the library function. */
8820 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8821 break;
8822
8823 if (arglist == 0
8824 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8825 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8826 || TREE_CHAIN (arglist) == 0
8827 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8828 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8829 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8830 break;
8831 else if (!HAVE_cmpstrsi)
8832 break;
8833 {
8834 tree arg1 = TREE_VALUE (arglist);
8835 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8836 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8837 rtx result;
8838
8839 int arg1_align
8840 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8841 int arg2_align
8842 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8843 enum machine_mode insn_mode
8844 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8845
8846 /* If either argument is not a pointer (its alignment is 0), call the library function. */
8847 if (arg1_align == 0 || arg2_align == 0)
8848 {
8849 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8850 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8851 break;
8852 }
8853
8854 /* Make a place to write the result of the instruction. */
8855 result = target;
8856 if (! (result != 0
8857 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8858 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8859 result = gen_reg_rtx (insn_mode);
8860
8861 emit_insn (gen_cmpstrsi (result,
8862 gen_rtx (MEM, BLKmode,
8863 expand_expr (arg1, NULL_RTX,
8864 ptr_mode,
8865 EXPAND_NORMAL)),
8866 gen_rtx (MEM, BLKmode,
8867 expand_expr (arg2, NULL_RTX,
8868 ptr_mode,
8869 EXPAND_NORMAL)),
8870 expand_expr (len, NULL_RTX, VOIDmode, 0),
8871 GEN_INT (MIN (arg1_align, arg2_align))));
8872
8873 /* Return the value in the proper mode for this function. */
8874 mode = TYPE_MODE (TREE_TYPE (exp));
8875 if (GET_MODE (result) == mode)
8876 return result;
8877 else if (target != 0)
8878 {
8879 convert_move (target, result, 0);
8880 return target;
8881 }
8882 else
8883 return convert_to_mode (mode, result, 0);
8884 }
8885 #else
8886 case BUILT_IN_STRCMP:
8887 case BUILT_IN_MEMCMP:
8888 break;
8889 #endif
8890
8891 /* __builtin_setjmp is passed a pointer to an array of five words
8892 (not all will be used on all machines). It operates similarly to
8893 the C library function of the same name, but is more efficient.
8894 Much of the code below (and for longjmp) is copied from the handling
8895 of non-local gotos.
8896
8897 NOTE: This is intended for use by GNAT and will only work when
8898 used in the manner GNAT uses it. This code will likely NOT survive
8899 to the GCC 2.8.0 release. */
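/* An illustrative sketch of the intended use; the buffer layout shown
   is the one actually emitted below:

     void *buf[5];                  buf[0]: frame pointer
                                    buf[1]: label to resume at
                                    buf[2..4]: machine-dependent stack save area
     if (__builtin_setjmp (buf) == 0)
       ...direct path; returns 0...
     else
       ...reached via __builtin_longjmp; returns 1...  */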
8900 case BUILT_IN_SETJMP:
8901 if (arglist == 0
8902 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8903 break;
8904
8905 {
8906 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8907 VOIDmode, 0);
8908 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8909 enum machine_mode sa_mode = Pmode;
8910 rtx stack_save;
8911 int old_inhibit_defer_pop = inhibit_defer_pop;
8912 int return_pops
8913 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8914 build_function_type (void_type_node, NULL_TREE),
8915 0);
8916 rtx next_arg_reg;
8917 CUMULATIVE_ARGS args_so_far;
8918 int i;
8919
8920 #ifdef POINTERS_EXTEND_UNSIGNED
8921 buf_addr = convert_memory_address (Pmode, buf_addr);
8922 #endif
8923
8924 buf_addr = force_reg (Pmode, buf_addr);
8925
8926 if (target == 0 || GET_CODE (target) != REG
8927 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8928 target = gen_reg_rtx (value_mode);
8929
8930 emit_queue ();
8931
8932 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8933 current_function_calls_setjmp = 1;
8934
8935 /* We store the frame pointer and the address of lab1 in the buffer
8936 and use the rest of it for the stack save area, which is
8937 machine-dependent. */
8938 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8939 virtual_stack_vars_rtx);
8940 emit_move_insn
8941 (validize_mem (gen_rtx (MEM, Pmode,
8942 plus_constant (buf_addr,
8943 GET_MODE_SIZE (Pmode)))),
8944 gen_rtx (LABEL_REF, Pmode, lab1));
8945
8946 #ifdef HAVE_save_stack_nonlocal
8947 if (HAVE_save_stack_nonlocal)
8948 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8949 #endif
8950
8951 stack_save = gen_rtx (MEM, sa_mode,
8952 plus_constant (buf_addr,
8953 2 * GET_MODE_SIZE (Pmode)));
8954 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8955
8956 #ifdef HAVE_setjmp
8957 if (HAVE_setjmp)
8958 emit_insn (gen_setjmp ());
8959 #endif
8960
8961 /* Set TARGET to zero and branch around the other case. */
8962 emit_move_insn (target, const0_rtx);
8963 emit_jump_insn (gen_jump (lab2));
8964 emit_barrier ();
8965 emit_label (lab1);
8966
8967 /* Note that setjmp clobbers FP when we get here, so we have to
8968 make sure it's marked as used by this function. */
8969 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8970
8971 /* Mark the static chain as clobbered here so life information
8972 doesn't get messed up for it. */
8973 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8974
8975 /* Now put in the code to restore the frame pointer, and argument
8976 pointer, if needed. The code below is from expand_end_bindings
8977 in stmt.c; see detailed documentation there. */
8978 #ifdef HAVE_nonlocal_goto
8979 if (! HAVE_nonlocal_goto)
8980 #endif
8981 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8982
8983 current_function_has_nonlocal_goto = 1;
8984
8985 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8986 if (fixed_regs[ARG_POINTER_REGNUM])
8987 {
8988 #ifdef ELIMINABLE_REGS
8989 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8990
8991 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8992 if (elim_regs[i].from == ARG_POINTER_REGNUM
8993 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8994 break;
8995
8996 if (i == sizeof elim_regs / sizeof elim_regs[0])
8997 #endif
8998 {
8999 /* Now restore our arg pointer from the address at which it
9000 was saved in our stack frame.
9001 If space hasn't been allocated for it yet, make
9002 some now. */
9003 if (arg_pointer_save_area == 0)
9004 arg_pointer_save_area
9005 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
9006 emit_move_insn (virtual_incoming_args_rtx,
9007 copy_to_reg (arg_pointer_save_area));
9008 }
9009 }
9010 #endif
9011
9012 #ifdef HAVE_nonlocal_goto_receiver
9013 if (HAVE_nonlocal_goto_receiver)
9014 emit_insn (gen_nonlocal_goto_receiver ());
9015 #endif
9016 /* The static chain pointer contains the address of the dummy function.
9017 We need to call it here to handle some PIC cases of restoring
9018 a global pointer. Then return 1. */
9019 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
9020
9021 /* We can't actually call emit_library_call here, so do everything
9022 it does, which isn't much for a libfunc with no args. */
9023 op0 = memory_address (FUNCTION_MODE, op0);
9024
9025 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
9026 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
9027 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
9028
9029 #ifndef ACCUMULATE_OUTGOING_ARGS
9030 #ifdef HAVE_call_pop
9031 if (HAVE_call_pop)
9032 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
9033 const0_rtx, next_arg_reg,
9034 GEN_INT (return_pops)));
9035 else
9036 #endif
9037 #endif
9038
9039 #ifdef HAVE_call
9040 if (HAVE_call)
9041 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
9042 const0_rtx, next_arg_reg, const0_rtx));
9043 else
9044 #endif
9045 abort ();
9046
9047 emit_move_insn (target, const1_rtx);
9048 emit_label (lab2);
9049 return target;
9050 }
9051
9052 /* __builtin_longjmp is passed a pointer to an array of five words
9053 and a value, which is a dummy. It's similar to the C library longjmp
9054 function but works with __builtin_setjmp above. */
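/* Continuing the sketch above, a resume point saved with
   __builtin_setjmp (buf) is reentered with

     __builtin_longjmp (buf, 1);

   where the second argument is the dummy mentioned above and is
   expanded only for its side-effects.  */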
9055 case BUILT_IN_LONGJMP:
9056 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9057 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9058 break;
9059
9060 {
9061 tree dummy_id = get_identifier ("__dummy");
9062 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9063 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9064 #ifdef POINTERS_EXTEND_UNSIGNED
9065 rtx buf_addr
9066 = force_reg (Pmode,
9067 convert_memory_address
9068 (Pmode,
9069 expand_expr (TREE_VALUE (arglist),
9070 NULL_RTX, VOIDmode, 0)));
9071 #else
9072 rtx buf_addr
9073 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9074 NULL_RTX,
9075 VOIDmode, 0));
9076 #endif
9077 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9078 rtx lab = gen_rtx (MEM, Pmode,
9079 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9080 enum machine_mode sa_mode
9081 #ifdef HAVE_save_stack_nonlocal
9082 = (HAVE_save_stack_nonlocal
9083 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9084 : Pmode);
9085 #else
9086 = Pmode;
9087 #endif
9088 rtx stack = gen_rtx (MEM, sa_mode,
9089 plus_constant (buf_addr,
9090 2 * GET_MODE_SIZE (Pmode)));
9091
9092 DECL_EXTERNAL (dummy_decl) = 1;
9093 TREE_PUBLIC (dummy_decl) = 1;
9094 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9095
9096 /* Expand the second expression just for side-effects. */
9097 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9098 const0_rtx, VOIDmode, 0);
9099
9100 assemble_external (dummy_decl);
9101
9102 /* Pick up FP, label, and SP from the block and jump. This code is
9103 from expand_goto in stmt.c; see there for detailed comments. */
9104 #ifdef HAVE_nonlocal_goto
9105 if (HAVE_nonlocal_goto)
9106 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9107 XEXP (DECL_RTL (dummy_decl), 0)));
9108 else
9109 #endif
9110 {
9111 lab = copy_to_reg (lab);
9112 emit_move_insn (hard_frame_pointer_rtx, fp);
9113 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9114
9115 /* Put in the static chain register the address of the dummy
9116 function. */
9117 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9118 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9119 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9120 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9121 emit_indirect_jump (lab);
9122 }
9123
9124 return const0_rtx;
9125 }
9126
9127 default: /* just do library call, if unknown builtin */
9128 error ("built-in function `%s' not currently supported",
9129 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9130 }
9131
9132 /* The switch statement above can drop through to cause the function
9133 to be called normally. */
9134
9135 return expand_call (exp, target, ignore);
9136 }
9137 \f
9138 /* Built-in functions to perform an untyped call and return. */
9139
9140 /* For each register that may be used for calling a function, this
9141 gives a mode used to copy the register's value. VOIDmode indicates
9142 the register is not used for calling a function. If the machine
9143 has register windows, this gives only the outbound registers.
9144 INCOMING_REGNO gives the corresponding inbound register. */
9145 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9146
9147 /* For each register that may be used for returning values, this gives
9148 a mode used to copy the register's value. VOIDmode indicates the
9149 register is not used for returning values. If the machine has
9150 register windows, this gives only the outbound registers.
9151 INCOMING_REGNO gives the corresponding inbound register. */
9152 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9153
9154 /* For each register that may be used for calling a function, this
9155 gives the offset of that register into the block returned by
9156 __builtin_apply_args. 0 indicates that the register is not
9157 used for calling a function. */
9158 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9159
9160 /* Return the offset of register REGNO into the block returned by
9161 __builtin_apply_args. This is not declared static, since it is
9162 needed in objc-act.c. */
9163
9164 int
9165 apply_args_register_offset (regno)
9166 int regno;
9167 {
9168 apply_args_size ();
9169
9170 /* Arguments are always put in outgoing registers (in the argument
9171 block) when that makes sense. */
9172 #ifdef OUTGOING_REGNO
9173 regno = OUTGOING_REGNO (regno);
9174 #endif
9175 return apply_args_reg_offset[regno];
9176 }
9177
9178 /* Return the size required for the block returned by __builtin_apply_args,
9179 and initialize apply_args_mode. */
9180
9181 static int
9182 apply_args_size ()
9183 {
9184 static int size = -1;
9185 int align, regno;
9186 enum machine_mode mode;
9187
9188 /* The values computed by this function never change. */
9189 if (size < 0)
9190 {
9191 /* The first value is the incoming arg-pointer. */
9192 size = GET_MODE_SIZE (Pmode);
9193
9194 /* The second value is the structure value address unless this is
9195 passed as an "invisible" first argument. */
9196 if (struct_value_rtx)
9197 size += GET_MODE_SIZE (Pmode);
9198
9199 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9200 if (FUNCTION_ARG_REGNO_P (regno))
9201 {
9202 /* Search for the proper mode for copying this register's
9203 value. I'm not sure this is right, but it works so far. */
9204 enum machine_mode best_mode = VOIDmode;
9205
9206 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9207 mode != VOIDmode;
9208 mode = GET_MODE_WIDER_MODE (mode))
9209 if (HARD_REGNO_MODE_OK (regno, mode)
9210 && HARD_REGNO_NREGS (regno, mode) == 1)
9211 best_mode = mode;
9212
9213 if (best_mode == VOIDmode)
9214 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9215 mode != VOIDmode;
9216 mode = GET_MODE_WIDER_MODE (mode))
9217 if (HARD_REGNO_MODE_OK (regno, mode)
9218 && (mov_optab->handlers[(int) mode].insn_code
9219 != CODE_FOR_nothing))
9220 best_mode = mode;
9221
9222 mode = best_mode;
9223 if (mode == VOIDmode)
9224 abort ();
9225
9226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9227 if (size % align != 0)
9228 size = CEIL (size, align) * align;
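/* E.g., a running size of 6 with align 4 is rounded up to 8 before
   this register's slot is assigned.  */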
9229 apply_args_reg_offset[regno] = size;
9230 size += GET_MODE_SIZE (mode);
9231 apply_args_mode[regno] = mode;
9232 }
9233 else
9234 {
9235 apply_args_mode[regno] = VOIDmode;
9236 apply_args_reg_offset[regno] = 0;
9237 }
9238 }
9239 return size;
9240 }
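/* For illustration only: on a hypothetical 32-bit target with no
   structure value register and two word-sized argument registers,
   the block would be laid out as

     offset 0: incoming arg pointer
     offset 4: first argument register
     offset 8: second argument register

   and apply_args_size () would return 12.  */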
9241
9242 /* Return the size required for the block returned by __builtin_apply,
9243 and initialize apply_result_mode. */
9244
9245 static int
9246 apply_result_size ()
9247 {
9248 static int size = -1;
9249 int align, regno;
9250 enum machine_mode mode;
9251
9252 /* The values computed by this function never change. */
9253 if (size < 0)
9254 {
9255 size = 0;
9256
9257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9258 if (FUNCTION_VALUE_REGNO_P (regno))
9259 {
9260 /* Search for the proper mode for copying this register's
9261 value. I'm not sure this is right, but it works so far. */
9262 enum machine_mode best_mode = VOIDmode;
9263
9264 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9265 mode != TImode;
9266 mode = GET_MODE_WIDER_MODE (mode))
9267 if (HARD_REGNO_MODE_OK (regno, mode))
9268 best_mode = mode;
9269
9270 if (best_mode == VOIDmode)
9271 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9272 mode != VOIDmode;
9273 mode = GET_MODE_WIDER_MODE (mode))
9274 if (HARD_REGNO_MODE_OK (regno, mode)
9275 && (mov_optab->handlers[(int) mode].insn_code
9276 != CODE_FOR_nothing))
9277 best_mode = mode;
9278
9279 mode = best_mode;
9280 if (mode == VOIDmode)
9281 abort ();
9282
9283 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9284 if (size % align != 0)
9285 size = CEIL (size, align) * align;
9286 size += GET_MODE_SIZE (mode);
9287 apply_result_mode[regno] = mode;
9288 }
9289 else
9290 apply_result_mode[regno] = VOIDmode;
9291
9292 /* Allow targets that use untyped_call and untyped_return to override
9293 the size so that machine-specific information can be stored here. */
9294 #ifdef APPLY_RESULT_SIZE
9295 size = APPLY_RESULT_SIZE;
9296 #endif
9297 }
9298 return size;
9299 }
9300
9301 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9302 /* Create a vector describing the result block RESULT. If SAVEP is true,
9303 the result block is used to save the values; otherwise it is used to
9304 restore the values. */
9305
9306 static rtx
9307 result_vector (savep, result)
9308 int savep;
9309 rtx result;
9310 {
9311 int regno, size, align, nelts;
9312 enum machine_mode mode;
9313 rtx reg, mem;
9314 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9315
9316 size = nelts = 0;
9317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9318 if ((mode = apply_result_mode[regno]) != VOIDmode)
9319 {
9320 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9321 if (size % align != 0)
9322 size = CEIL (size, align) * align;
9323 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9324 mem = change_address (result, mode,
9325 plus_constant (XEXP (result, 0), size));
9326 savevec[nelts++] = (savep
9327 ? gen_rtx (SET, VOIDmode, mem, reg)
9328 : gen_rtx (SET, VOIDmode, reg, mem));
9329 size += GET_MODE_SIZE (mode);
9330 }
9331 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9332 }
9333 #endif /* HAVE_untyped_call or HAVE_untyped_return */
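/* For illustration, with a single SImode return register N the vector
   built above is, for SAVEP,

     (parallel [(set (mem:SI (plus ...)) (reg:SI N))])

   and the restore form swaps the operands of each SET.  */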
9334
9335 /* Save the state required to perform an untyped call with the same
9336 arguments as were passed to the current function. */
9337
9338 static rtx
9339 expand_builtin_apply_args ()
9340 {
9341 rtx registers;
9342 int size, align, regno;
9343 enum machine_mode mode;
9344
9345 /* Create a block where the arg-pointer, structure value address,
9346 and argument registers can be saved. */
9347 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9348
9349 /* Walk past the arg-pointer and structure value address. */
9350 size = GET_MODE_SIZE (Pmode);
9351 if (struct_value_rtx)
9352 size += GET_MODE_SIZE (Pmode);
9353
9354 /* Save each register used in calling a function to the block. */
9355 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9356 if ((mode = apply_args_mode[regno]) != VOIDmode)
9357 {
9358 rtx tem;
9359
9360 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9361 if (size % align != 0)
9362 size = CEIL (size, align) * align;
9363
9364 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9365
9366 #ifdef STACK_REGS
9367 /* For the benefit of reg-stack.c's stack register bookkeeping.
9368 Compare with a similar piece of code in function.c. */
9369
9370 emit_insn (gen_rtx (USE, mode, tem));
9371 #endif
9372
9373 emit_move_insn (change_address (registers, mode,
9374 plus_constant (XEXP (registers, 0),
9375 size)),
9376 tem);
9377 size += GET_MODE_SIZE (mode);
9378 }
9379
9380 /* Save the arg pointer to the block. */
9381 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9382 copy_to_reg (virtual_incoming_args_rtx));
9383 size = GET_MODE_SIZE (Pmode);
9384
9385 /* Save the structure value address unless this is passed as an
9386 "invisible" first argument. */
9387 if (struct_value_incoming_rtx)
9388 {
9389 emit_move_insn (change_address (registers, Pmode,
9390 plus_constant (XEXP (registers, 0),
9391 size)),
9392 copy_to_reg (struct_value_incoming_rtx));
9393 size += GET_MODE_SIZE (Pmode);
9394 }
9395
9396 /* Return the address of the block. */
9397 return copy_addr_to_reg (XEXP (registers, 0));
9398 }
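/* The untyped-call builtins expanded here and below are meant to be
   used together, roughly as in this forwarding wrapper (the
   argument-block size 64 is a made-up caller-supplied value):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) fn, args, 64);
     __builtin_return (result);  */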
9399
9400 /* Perform an untyped call and save the state required to perform an
9401 untyped return of whatever value was returned by the given function. */
9402
9403 static rtx
9404 expand_builtin_apply (function, arguments, argsize)
9405 rtx function, arguments, argsize;
9406 {
9407 int size, align, regno;
9408 enum machine_mode mode;
9409 rtx incoming_args, result, reg, dest, call_insn;
9410 rtx old_stack_level = 0;
9411 rtx call_fusage = 0;
9412
9413 /* Create a block where the return registers can be saved. */
9414 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9415
9416 /* ??? The argsize value should be adjusted here. */
9417
9418 /* Fetch the arg pointer from the ARGUMENTS block. */
9419 incoming_args = gen_reg_rtx (Pmode);
9420 emit_move_insn (incoming_args,
9421 gen_rtx (MEM, Pmode, arguments));
9422 #ifndef STACK_GROWS_DOWNWARD
9423 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9424 incoming_args, 0, OPTAB_LIB_WIDEN);
9425 #endif
9426
9427 /* Perform postincrements before actually calling the function. */
9428 emit_queue ();
9429
9430 /* Push a new argument block and copy the arguments. */
9431 do_pending_stack_adjust ();
9432 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9433
9434 /* Push a block of memory onto the stack to store the memory arguments.
9435 Save the address in a register, and copy the memory arguments. ??? I
9436 haven't figured out how the calling convention macros affect this,
9437 but it's likely that the source and/or destination addresses in
9438 the block copy will need updating in machine-specific ways. */
9439 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9440 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9441 gen_rtx (MEM, BLKmode, incoming_args),
9442 argsize,
9443 PARM_BOUNDARY / BITS_PER_UNIT);
9444
9445 /* Refer to the argument block. */
9446 apply_args_size ();
9447 arguments = gen_rtx (MEM, BLKmode, arguments);
9448
9449 /* Walk past the arg-pointer and structure value address. */
9450 size = GET_MODE_SIZE (Pmode);
9451 if (struct_value_rtx)
9452 size += GET_MODE_SIZE (Pmode);
9453
9454 /* Restore each of the registers previously saved. Make USE insns
9455 for each of these registers for use in making the call. */
9456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9457 if ((mode = apply_args_mode[regno]) != VOIDmode)
9458 {
9459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9460 if (size % align != 0)
9461 size = CEIL (size, align) * align;
9462 reg = gen_rtx (REG, mode, regno);
9463 emit_move_insn (reg,
9464 change_address (arguments, mode,
9465 plus_constant (XEXP (arguments, 0),
9466 size)));
9467
9468 use_reg (&call_fusage, reg);
9469 size += GET_MODE_SIZE (mode);
9470 }
9471
9472 /* Restore the structure value address unless this is passed as an
9473 "invisible" first argument. */
9474 size = GET_MODE_SIZE (Pmode);
9475 if (struct_value_rtx)
9476 {
9477 rtx value = gen_reg_rtx (Pmode);
9478 emit_move_insn (value,
9479 change_address (arguments, Pmode,
9480 plus_constant (XEXP (arguments, 0),
9481 size)));
9482 emit_move_insn (struct_value_rtx, value);
9483 if (GET_CODE (struct_value_rtx) == REG)
9484 use_reg (&call_fusage, struct_value_rtx);
9485 size += GET_MODE_SIZE (Pmode);
9486 }
9487
9488 /* All arguments and registers used for the call are set up by now! */
9489 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9490
9491 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there
9492 is no need, and we don't want to load it into a register as an
9493 optimization, because prepare_call_address already did so if needed. */
9494 if (GET_CODE (function) != SYMBOL_REF)
9495 function = memory_address (FUNCTION_MODE, function);
9496
9497 /* Generate the actual call instruction and save the return value. */
9498 #ifdef HAVE_untyped_call
9499 if (HAVE_untyped_call)
9500 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9501 result, result_vector (1, result)));
9502 else
9503 #endif
9504 #ifdef HAVE_call_value
9505 if (HAVE_call_value)
9506 {
9507 rtx valreg = 0;
9508
9509 /* Locate the unique return register. It is not possible to
9510 express a call that sets more than one return register using
9511 call_value; use untyped_call for that. In fact, untyped_call
9512 only needs to save the return registers in the given block. */
9513 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9514 if ((mode = apply_result_mode[regno]) != VOIDmode)
9515 {
9516 if (valreg)
9517 abort (); /* HAVE_untyped_call required. */
9518 valreg = gen_rtx (REG, mode, regno);
9519 }
9520
9521 emit_call_insn (gen_call_value (valreg,
9522 gen_rtx (MEM, FUNCTION_MODE, function),
9523 const0_rtx, NULL_RTX, const0_rtx));
9524
9525 emit_move_insn (change_address (result, GET_MODE (valreg),
9526 XEXP (result, 0)),
9527 valreg);
9528 }
9529 else
9530 #endif
9531 abort ();
9532
9533 /* Find the CALL insn we just emitted. */
9534 for (call_insn = get_last_insn ();
9535 call_insn && GET_CODE (call_insn) != CALL_INSN;
9536 call_insn = PREV_INSN (call_insn))
9537 ;
9538
9539 if (! call_insn)
9540 abort ();
9541
9542 /* Put the register usage information on the CALL. If there is already
9543 some usage information, put ours at the end. */
9544 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9545 {
9546 rtx link;
9547
9548 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9549 link = XEXP (link, 1))
9550 ;
9551
9552 XEXP (link, 1) = call_fusage;
9553 }
9554 else
9555 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9556
9557 /* Restore the stack. */
9558 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9559
9560 /* Return the address of the result block. */
9561 return copy_addr_to_reg (XEXP (result, 0));
9562 }
9563
9564 /* Perform an untyped return. */
9565
9566 static void
9567 expand_builtin_return (result)
9568 rtx result;
9569 {
9570 int size, align, regno;
9571 enum machine_mode mode;
9572 rtx reg;
9573 rtx call_fusage = 0;
9574
9575 apply_result_size ();
9576 result = gen_rtx (MEM, BLKmode, result);
9577
9578 #ifdef HAVE_untyped_return
9579 if (HAVE_untyped_return)
9580 {
9581 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9582 emit_barrier ();
9583 return;
9584 }
9585 #endif
9586
9587 /* Restore the return value and note that each value is used. */
9588 size = 0;
9589 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9590 if ((mode = apply_result_mode[regno]) != VOIDmode)
9591 {
9592 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9593 if (size % align != 0)
9594 size = CEIL (size, align) * align;
9595 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9596 emit_move_insn (reg,
9597 change_address (result, mode,
9598 plus_constant (XEXP (result, 0),
9599 size)));
9600
9601 push_to_sequence (call_fusage);
9602 emit_insn (gen_rtx (USE, VOIDmode, reg));
9603 call_fusage = get_insns ();
9604 end_sequence ();
9605 size += GET_MODE_SIZE (mode);
9606 }
9607
9608 /* Put the USE insns before the return. */
9609 emit_insns (call_fusage);
9610
9611 /* Return whatever value was restored by jumping directly to the end
9612 of the function. */
9613 expand_null_return ();
9614 }
9615 \f
9616 /* Expand code for a post- or pre- increment or decrement
9617 and return the RTX for the result.
9618 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
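/* E.g., for `i++' (POST == 1) the value returned is a copy of the old
   value of `i', while for `++i' (POST == 0) it is the incremented
   value itself.  */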
9619
9620 static rtx
9621 expand_increment (exp, post, ignore)
9622 register tree exp;
9623 int post, ignore;
9624 {
9625 register rtx op0, op1;
9626 register rtx temp, value;
9627 register tree incremented = TREE_OPERAND (exp, 0);
9628 optab this_optab = add_optab;
9629 int icode;
9630 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9631 int op0_is_copy = 0;
9632 int single_insn = 0;
9633 /* 1 means we can't store into OP0 directly,
9634 because it is a subreg narrower than a word,
9635 and we don't dare clobber the rest of the word. */
9636 int bad_subreg = 0;
9637
9638 if (output_bytecode)
9639 {
9640 bc_expand_expr (exp);
9641 return NULL_RTX;
9642 }
9643
9644 /* Stabilize any component ref that might need to be
9645 evaluated more than once below. */
9646 if (!post
9647 || TREE_CODE (incremented) == BIT_FIELD_REF
9648 || (TREE_CODE (incremented) == COMPONENT_REF
9649 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9650 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9651 incremented = stabilize_reference (incremented);
9652 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9653 ones into save exprs so that they don't accidentally get evaluated
9654 more than once by the code below. */
9655 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9656 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9657 incremented = save_expr (incremented);
9658
9659 /* Compute the operands as RTX.
9660 Note whether OP0 is the actual lvalue or a copy of it:
9661 I believe it is a copy iff it is a register or subreg
9662 and insns were generated in computing it. */
9663
9664 temp = get_last_insn ();
9665 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9666
9667 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9668 in place but instead must do sign- or zero-extension during assignment,
9669 so we copy it into a new register and let the code below use it as
9670 a copy.
9671
9672 Note that we can safely modify this SUBREG since it is known not to be
9673 shared (it was made by the expand_expr call above). */
9674
9675 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9676 {
9677 if (post)
9678 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9679 else
9680 bad_subreg = 1;
9681 }
9682 else if (GET_CODE (op0) == SUBREG
9683 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9684 {
9685 /* We cannot increment this SUBREG in place. If we are
9686 post-incrementing, get a copy of the old value. Otherwise,
9687 just mark that we cannot increment in place. */
9688 if (post)
9689 op0 = copy_to_reg (op0);
9690 else
9691 bad_subreg = 1;
9692 }
9693
9694 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9695 && temp != get_last_insn ());
9696 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9697
9698 /* Decide whether incrementing or decrementing. */
9699 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9700 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9701 this_optab = sub_optab;
9702
9703 /* Convert decrement by a constant into a negative increment. */
9704 if (this_optab == sub_optab
9705 && GET_CODE (op1) == CONST_INT)
9706 {
9707 op1 = GEN_INT (- INTVAL (op1));
9708 this_optab = add_optab;
9709 }
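/* E.g., a decrement by 1 proceeds from here on as an addition of -1.  */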
9710
9711 /* For a preincrement, see if we can do this with a single instruction. */
9712 if (!post)
9713 {
9714 icode = (int) this_optab->handlers[(int) mode].insn_code;
9715 if (icode != (int) CODE_FOR_nothing
9716 /* Make sure that OP0 is valid for operands 0 and 1
9717 of the insn we want to queue. */
9718 && (*insn_operand_predicate[icode][0]) (op0, mode)
9719 && (*insn_operand_predicate[icode][1]) (op0, mode)
9720 && (*insn_operand_predicate[icode][2]) (op1, mode))
9721 single_insn = 1;
9722 }
9723
9724 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9725 then we cannot just increment OP0. We must therefore contrive to
9726 increment the original value. Then, for postincrement, we can return
9727 OP0 since it is a copy of the old value. For preincrement, expand here
9728 unless we can do it with a single insn.
9729
9730 Likewise if storing directly into OP0 would clobber high bits
9731 we need to preserve (bad_subreg). */
9732 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9733 {
9734 /* This is the easiest way to increment the value wherever it is.
9735 Problems with multiple evaluation of INCREMENTED are prevented
9736 because either (1) it is a component_ref or preincrement,
9737 in which case it was stabilized above, or (2) it is an array_ref
9738 with constant index in an array in a register, which is
9739 safe to reevaluate. */
9740 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9741 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9742 ? MINUS_EXPR : PLUS_EXPR),
9743 TREE_TYPE (exp),
9744 incremented,
9745 TREE_OPERAND (exp, 1));
9746
9747 while (TREE_CODE (incremented) == NOP_EXPR
9748 || TREE_CODE (incremented) == CONVERT_EXPR)
9749 {
9750 newexp = convert (TREE_TYPE (incremented), newexp);
9751 incremented = TREE_OPERAND (incremented, 0);
9752 }
9753
9754 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9755 return post ? op0 : temp;
9756 }
9757
9758 if (post)
9759 {
9760 /* We have a true reference to the value in OP0.
9761 If there is an insn to add or subtract in this mode, queue it.
9762 Queueing the increment insn avoids the register shuffling
9763 that often results if we must increment now and first save
9764 the old value for subsequent use. */
9765
9766 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9767 op0 = stabilize (op0);
9768 #endif
9769
9770 icode = (int) this_optab->handlers[(int) mode].insn_code;
9771 if (icode != (int) CODE_FOR_nothing
9772 /* Make sure that OP0 is valid for operands 0 and 1
9773 of the insn we want to queue. */
9774 && (*insn_operand_predicate[icode][0]) (op0, mode)
9775 && (*insn_operand_predicate[icode][1]) (op0, mode))
9776 {
9777 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9778 op1 = force_reg (mode, op1);
9779
9780 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9781 }
9782 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9783 {
9784 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9785 rtx temp, result;
9786
9787 op0 = change_address (op0, VOIDmode, addr);
9788 temp = force_reg (GET_MODE (op0), op0);
9789 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9790 op1 = force_reg (mode, op1);
9791
9792 /* The increment queue is LIFO, thus we have to `queue'
9793 the instructions in reverse order. */
9794 enqueue_insn (op0, gen_move_insn (op0, temp));
9795 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9796 return result;
9797 }
9798 }
9799
9800 /* Preincrement, or we can't increment with one simple insn. */
9801 if (post)
9802 /* Save a copy of the value before inc or dec, to return it later. */
9803 temp = value = copy_to_reg (op0);
9804 else
9805 /* Arrange to return the incremented value. */
9806 /* Copy the rtx because expand_binop will protect it from the queue,
9807 and the result of that protection would be invalid for us to return
9808 if our caller does emit_queue before using our result. */
9809 temp = copy_rtx (value = op0);
9810
9811 /* Increment however we can. */
9812 op1 = expand_binop (mode, this_optab, value, op1, op0,
9813 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9814 /* Make sure the value is stored into OP0. */
9815 if (op1 != op0)
9816 emit_move_insn (op0, op1);
9817
9818 return temp;
9819 }
9820 \f
9821 /* Expand all function calls contained within EXP, innermost ones first.
9822 But don't look within expressions that have sequence points.
9823 For each CALL_EXPR, record the rtx for its value
9824 in the CALL_EXPR_RTL field. */
9825
9826 static void
9827 preexpand_calls (exp)
9828 tree exp;
9829 {
9830 register int nops, i;
9831 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9832
9833 if (! do_preexpand_calls)
9834 return;
9835
9836 /* Only expressions and references can contain calls. */
9837
9838 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9839 return;
9840
9841 switch (TREE_CODE (exp))
9842 {
9843 case CALL_EXPR:
9844 /* Do nothing if already expanded. */
9845 if (CALL_EXPR_RTL (exp) != 0
9846 /* Do nothing if the call returns a variable-sized object. */
9847 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9848 /* Do nothing to built-in functions. */
9849 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9850 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9851 == FUNCTION_DECL)
9852 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9853 return;
9854
9855 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9856 return;
9857
9858 case COMPOUND_EXPR:
9859 case COND_EXPR:
9860 case TRUTH_ANDIF_EXPR:
9861 case TRUTH_ORIF_EXPR:
9862 /* If we find one of these, then we can be sure
9863 the adjust will be done for it (since it makes jumps).
9864 Do it now, so that if this is inside an argument
9865 of a function, we don't get the stack adjustment
9866 after some other args have already been pushed. */
9867 do_pending_stack_adjust ();
9868 return;
9869
9870 case BLOCK:
9871 case RTL_EXPR:
9872 case WITH_CLEANUP_EXPR:
9873 case CLEANUP_POINT_EXPR:
9874 return;
9875
9876 case SAVE_EXPR:
9877 if (SAVE_EXPR_RTL (exp) != 0)
9878 return;
9879 }
9880
9881 nops = tree_code_length[(int) TREE_CODE (exp)];
9882 for (i = 0; i < nops; i++)
9883 if (TREE_OPERAND (exp, i) != 0)
9884 {
9885 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9886 if (type == 'e' || type == '<' || type == '1' || type == '2'
9887 || type == 'r')
9888 preexpand_calls (TREE_OPERAND (exp, i));
9889 }
9890 }
9891 \f
9892 /* At the start of a function, record that we have no previously-pushed
9893 arguments waiting to be popped. */
9894
9895 void
9896 init_pending_stack_adjust ()
9897 {
9898 pending_stack_adjust = 0;
9899 }
9900
9901 /* When exiting from a function, if safe, clear out any pending stack adjust
9902 so the adjustment won't get done. */
9903
9904 void
9905 clear_pending_stack_adjust ()
9906 {
9907 #ifdef EXIT_IGNORE_STACK
9908 if (optimize > 0
9909 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9910 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9911 && ! flag_inline_functions)
9912 pending_stack_adjust = 0;
9913 #endif
9914 }
9915
9916 /* Pop any previously-pushed arguments that have not been popped yet. */
9917
9918 void
9919 do_pending_stack_adjust ()
9920 {
9921 if (inhibit_defer_pop == 0)
9922 {
9923 if (pending_stack_adjust != 0)
9924 adjust_stack (GEN_INT (pending_stack_adjust));
9925 pending_stack_adjust = 0;
9926 }
9927 }
9928
9929 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9930 Returns the cleanups to be performed. */
9931
9932 static tree
9933 defer_cleanups_to (old_cleanups)
9934 tree old_cleanups;
9935 {
9936 tree new_cleanups = NULL_TREE;
9937 tree cleanups = cleanups_this_call;
9938 tree last = NULL_TREE;
9939
9940 while (cleanups_this_call != old_cleanups)
9941 {
9942 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9943 last = cleanups_this_call;
9944 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9945 }
9946
9947 if (last)
9948 {
9949 /* Remove the list from the chain of cleanups. */
9950 TREE_CHAIN (last) = NULL_TREE;
9951
9952 /* Reverse them so that we can build them in the right order. */
9953 cleanups = nreverse (cleanups);
9954
9955 /* All cleanups must be on the function_obstack. */
9956 push_obstacks_nochange ();
9957 resume_temporary_allocation ();
9958
9959 while (cleanups)
9960 {
9961 if (new_cleanups)
9962 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9963 TREE_VALUE (cleanups), new_cleanups);
9964 else
9965 new_cleanups = TREE_VALUE (cleanups);
9966
9967 cleanups = TREE_CHAIN (cleanups);
9968 }
9969
9970 pop_obstacks ();
9971 }
9972
9973 return new_cleanups;
9974 }
9975
9976 /* Expand all cleanups up to OLD_CLEANUPS.
9977 Needed here, and also for language-dependent calls. */
9978
9979 void
9980 expand_cleanups_to (old_cleanups)
9981 tree old_cleanups;
9982 {
9983 while (cleanups_this_call != old_cleanups)
9984 {
9985 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9986 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9987 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9988 }
9989 }
9990 \f
9991 /* Expand conditional expressions. */
9992
9993 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9994 LABEL is an rtx of code CODE_LABEL, in this function and all the
9995 functions here. */
9996
9997 void
9998 jumpifnot (exp, label)
9999 tree exp;
10000 rtx label;
10001 {
10002 do_jump (exp, label, NULL_RTX);
10003 }
10004
10005 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10006
10007 void
10008 jumpif (exp, label)
10009 tree exp;
10010 rtx label;
10011 {
10012 do_jump (exp, NULL_RTX, label);
10013 }
10014
10015 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10016 the result is zero, or IF_TRUE_LABEL if the result is one.
10017 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10018 meaning fall through in that case.
10019
10020 do_jump always does any pending stack adjust except when it does not
10021 actually perform a jump. An example where there is no jump
10022 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10023
10024 This function is responsible for optimizing cases such as
10025 &&, || and comparison operators in EXP. */
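/* For instance, `if (a && b)' arrives here as a TRUTH_ANDIF_EXPR:
   A is tested first and jumps straight to the false label when zero,
   so B is never evaluated in that case.  */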
10026
10027 void
10028 do_jump (exp, if_false_label, if_true_label)
10029 tree exp;
10030 rtx if_false_label, if_true_label;
10031 {
10032 register enum tree_code code = TREE_CODE (exp);
10033 /* Some cases need to create a label to jump to
10034 in order to properly fall through.
10035 These cases set DROP_THROUGH_LABEL nonzero. */
10036 rtx drop_through_label = 0;
10037 rtx temp;
10038 rtx comparison = 0;
10039 int i;
10040 tree type;
10041 enum machine_mode mode;
10042
10043 emit_queue ();
10044
10045 switch (code)
10046 {
10047 case ERROR_MARK:
10048 break;
10049
10050 case INTEGER_CST:
10051 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10052 if (temp)
10053 emit_jump (temp);
10054 break;
10055
10056 #if 0
10057 /* This is not true with #pragma weak */
10058 case ADDR_EXPR:
10059 /* The address of something can never be zero. */
10060 if (if_true_label)
10061 emit_jump (if_true_label);
10062 break;
10063 #endif
10064
10065 case NOP_EXPR:
10066 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10067 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10068 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10069 goto normal;
10070 case CONVERT_EXPR:
10071 /* If we are narrowing the operand, we have to do the compare in the
10072 narrower mode. */
10073 if ((TYPE_PRECISION (TREE_TYPE (exp))
10074 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10075 goto normal;
10076 case NON_LVALUE_EXPR:
10077 case REFERENCE_EXPR:
10078 case ABS_EXPR:
10079 case NEGATE_EXPR:
10080 case LROTATE_EXPR:
10081 case RROTATE_EXPR:
10082 /* These cannot change zero->non-zero or vice versa. */
10083 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10084 break;
10085
10086 #if 0
10087 /* This is never less insns than evaluating the PLUS_EXPR followed by
10088 a test and can be longer if the test is eliminated. */
10089 case PLUS_EXPR:
10090 /* Reduce to minus. */
10091 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10092 TREE_OPERAND (exp, 0),
10093 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10094 TREE_OPERAND (exp, 1))));
10095 /* Process as MINUS. */
10096 #endif
10097
10098 case MINUS_EXPR:
10099 /* Non-zero iff operands of minus differ. */
10100 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10101 TREE_OPERAND (exp, 0),
10102 TREE_OPERAND (exp, 1)),
10103 NE, NE);
10104 break;
10105
10106 case BIT_AND_EXPR:
10107 /* If we are AND'ing with a small constant, do this comparison in the
10108 smallest type that fits. If the machine doesn't have comparisons
10109 that small, it will be converted back to the wider comparison.
10110 This helps if we are testing the sign bit of a narrower object.
10111 combine can't do this for us because it can't know whether a
10112 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
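/* E.g., testing `x & 0x80' for an int X can be done as a QImode
   comparison on machines that have byte compares.  */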
10113
10114 if (! SLOW_BYTE_ACCESS
10115 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10116 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10117 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10118 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10119 && (type = type_for_mode (mode, 1)) != 0
10120 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10121 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10122 != CODE_FOR_nothing))
10123 {
10124 do_jump (convert (type, exp), if_false_label, if_true_label);
10125 break;
10126 }
10127 goto normal;
10128
10129 case TRUTH_NOT_EXPR:
10130 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10131 break;
10132
10133 case TRUTH_ANDIF_EXPR:
10134 {
10135 rtx seq1, seq2;
10136 tree cleanups, old_cleanups;
10137
10138 if (if_false_label == 0)
10139 if_false_label = drop_through_label = gen_label_rtx ();
10140 start_sequence ();
10141 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10142 seq1 = get_insns ();
10143 end_sequence ();
10144
10145 old_cleanups = cleanups_this_call;
10146 start_sequence ();
10147 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10148 seq2 = get_insns ();
10149 cleanups = defer_cleanups_to (old_cleanups);
10150 end_sequence ();
10151
10152 if (cleanups)
10153 {
10154 rtx flag = gen_reg_rtx (word_mode);
10155 tree new_cleanups;
10156 tree cond;
10157
10158 /* Flag cleanups as not needed. */
10159 emit_move_insn (flag, const0_rtx);
10160 emit_insns (seq1);
10161
10162 /* Flag cleanups as needed. */
10163 emit_move_insn (flag, const1_rtx);
10164 emit_insns (seq2);
10165
10166 /* All cleanups must be on the function_obstack. */
10167 push_obstacks_nochange ();
10168 resume_temporary_allocation ();
10169
10170 /* Convert flag, which is an rtx, into a tree. */
10171 cond = make_node (RTL_EXPR);
10172 TREE_TYPE (cond) = integer_type_node;
10173 RTL_EXPR_RTL (cond) = flag;
10174 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10175 cond = save_expr (cond);
10176
10177 new_cleanups = build (COND_EXPR, void_type_node,
10178 truthvalue_conversion (cond),
10179 cleanups, integer_zero_node);
10180 new_cleanups = fold (new_cleanups);
10181
10182 pop_obstacks ();
10183
10184 /* Now add in the conditionalized cleanups. */
10185 cleanups_this_call
10186 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10187 expand_eh_region_start ();
10188 }
10189 else
10190 {
10191 emit_insns (seq1);
10192 emit_insns (seq2);
10193 }
10194 }
10195 break;
10196
10197 case TRUTH_ORIF_EXPR:
10198 {
10199 rtx seq1, seq2;
10200 tree cleanups, old_cleanups;
10201
10202 if (if_true_label == 0)
10203 if_true_label = drop_through_label = gen_label_rtx ();
10204 start_sequence ();
10205 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10206 seq1 = get_insns ();
10207 end_sequence ();
10208
10209 old_cleanups = cleanups_this_call;
10210 start_sequence ();
10211 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10212 seq2 = get_insns ();
10213 cleanups = defer_cleanups_to (old_cleanups);
10214 end_sequence ();
10215
10216 if (cleanups)
10217 {
10218 rtx flag = gen_reg_rtx (word_mode);
10219 tree new_cleanups;
10220 tree cond;
10221
10222 /* Flag cleanups as not needed. */
10223 emit_move_insn (flag, const0_rtx);
10224 emit_insns (seq1);
10225
10226 /* Flag cleanups as needed. */
10227 emit_move_insn (flag, const1_rtx);
10228 emit_insns (seq2);
10229
10230 /* All cleanups must be on the function_obstack. */
10231 push_obstacks_nochange ();
10232 resume_temporary_allocation ();
10233
10234 /* Convert flag, which is an rtx, into a tree. */
10235 cond = make_node (RTL_EXPR);
10236 TREE_TYPE (cond) = integer_type_node;
10237 RTL_EXPR_RTL (cond) = flag;
10238 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10239 cond = save_expr (cond);
10240
10241 new_cleanups = build (COND_EXPR, void_type_node,
10242 truthvalue_conversion (cond),
10243 cleanups, integer_zero_node);
10244 new_cleanups = fold (new_cleanups);
10245
10246 pop_obstacks ();
10247
10248 /* Now add in the conditionalized cleanups. */
10249 cleanups_this_call
10250 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10251 expand_eh_region_start ();
10252 }
10253 else
10254 {
10255 emit_insns (seq1);
10256 emit_insns (seq2);
10257 }
10258 }
10259 break;
10260
10261 case COMPOUND_EXPR:
10262 push_temp_slots ();
10263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10264 preserve_temp_slots (NULL_RTX);
10265 free_temp_slots ();
10266 pop_temp_slots ();
10267 emit_queue ();
10268 do_pending_stack_adjust ();
10269 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10270 break;
10271
10272 case COMPONENT_REF:
10273 case BIT_FIELD_REF:
10274 case ARRAY_REF:
10275 {
10276 int bitsize, bitpos, unsignedp;
10277 enum machine_mode mode;
10278 tree type;
10279 tree offset;
10280 int volatilep = 0;
10281 int alignment;
10282
10283 /* Get description of this reference. We don't actually care
10284 about the underlying object here. */
10285 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10286 &mode, &unsignedp, &volatilep,
10287 &alignment);
10288
10289 type = type_for_size (bitsize, unsignedp);
10290 if (! SLOW_BYTE_ACCESS
10291 && type != 0 && bitsize >= 0
10292 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10293 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10294 != CODE_FOR_nothing))
10295 {
10296 do_jump (convert (type, exp), if_false_label, if_true_label);
10297 break;
10298 }
10299 goto normal;
10300 }
10301
10302 case COND_EXPR:
10303 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10304 if (integer_onep (TREE_OPERAND (exp, 1))
10305 && integer_zerop (TREE_OPERAND (exp, 2)))
10306 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10307
10308 else if (integer_zerop (TREE_OPERAND (exp, 1))
10309 && integer_onep (TREE_OPERAND (exp, 2)))
10310 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10311
10312 else
10313 {
10314 rtx seq1, seq2;
10315 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10316
10317 register rtx label1 = gen_label_rtx ();
10318 drop_through_label = gen_label_rtx ();
10319
10320 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10321
10322 /* We need to save the cleanups for the lhs and rhs separately.
10323 Keep track of the cleanups seen before the lhs. */
10324 old_cleanups = cleanups_this_call;
10325 start_sequence ();
10326 /* Now the THEN-expression. */
10327 do_jump (TREE_OPERAND (exp, 1),
10328 if_false_label ? if_false_label : drop_through_label,
10329 if_true_label ? if_true_label : drop_through_label);
10330 /* In case the do_jump just above never jumps. */
10331 do_pending_stack_adjust ();
10332 emit_label (label1);
10333 seq1 = get_insns ();
10334 /* Now grab the cleanups for the lhs. */
10335 cleanups_left_side = defer_cleanups_to (old_cleanups);
10336 end_sequence ();
10337
10338 /* And keep track of where we start before the rhs. */
10339 old_cleanups = cleanups_this_call;
10340 start_sequence ();
10341 /* Now the ELSE-expression. */
10342 do_jump (TREE_OPERAND (exp, 2),
10343 if_false_label ? if_false_label : drop_through_label,
10344 if_true_label ? if_true_label : drop_through_label);
10345 seq2 = get_insns ();
10346 /* Grab the cleanups for the rhs. */
10347 cleanups_right_side = defer_cleanups_to (old_cleanups);
10348 end_sequence ();
10349
10350 if (cleanups_left_side || cleanups_right_side)
10351 {
10352 /* Make the cleanups for the THEN and ELSE clauses
10353 conditional based on which half is executed. */
10354 rtx flag = gen_reg_rtx (word_mode);
10355 tree new_cleanups;
10356 tree cond;
10357
10358 /* Set the flag to 0 so that we know we executed the lhs. */
10359 emit_move_insn (flag, const0_rtx);
10360 emit_insns (seq1);
10361
10362 /* Set the flag to 1 so that we know we executed the rhs. */
10363 emit_move_insn (flag, const1_rtx);
10364 emit_insns (seq2);
10365
10366 /* Make sure the cleanup lives on the function_obstack. */
10367 push_obstacks_nochange ();
10368 resume_temporary_allocation ();
10369
10370 /* Now, build up a COND_EXPR that tests the value of the
10371 flag, and then either do the cleanups for the lhs or the
10372 rhs. */
10373 cond = make_node (RTL_EXPR);
10374 TREE_TYPE (cond) = integer_type_node;
10375 RTL_EXPR_RTL (cond) = flag;
10376 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10377 cond = save_expr (cond);
10378
10379 new_cleanups = build (COND_EXPR, void_type_node,
10380 truthvalue_conversion (cond),
10381 cleanups_right_side, cleanups_left_side);
10382 new_cleanups = fold (new_cleanups);
10383
10384 pop_obstacks ();
10385
10386 /* Now add in the conditionalized cleanups. */
10387 cleanups_this_call
10388 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10389 expand_eh_region_start ();
10390 }
10391 else
10392 {
10393 /* No cleanups were needed, so emit the two sequences
10394 directly. */
10395 emit_insns (seq1);
10396 emit_insns (seq2);
10397 }
10398 }
10399 break;
10400
10401 case EQ_EXPR:
10402 {
10403 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10404
10405 if (integer_zerop (TREE_OPERAND (exp, 1)))
10406 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10407 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10408 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10409 do_jump
10410 (fold
10411 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10412 fold (build (EQ_EXPR, TREE_TYPE (exp),
10413 fold (build1 (REALPART_EXPR,
10414 TREE_TYPE (inner_type),
10415 TREE_OPERAND (exp, 0))),
10416 fold (build1 (REALPART_EXPR,
10417 TREE_TYPE (inner_type),
10418 TREE_OPERAND (exp, 1))))),
10419 fold (build (EQ_EXPR, TREE_TYPE (exp),
10420 fold (build1 (IMAGPART_EXPR,
10421 TREE_TYPE (inner_type),
10422 TREE_OPERAND (exp, 0))),
10423 fold (build1 (IMAGPART_EXPR,
10424 TREE_TYPE (inner_type),
10425 TREE_OPERAND (exp, 1))))))),
10426 if_false_label, if_true_label);
10427 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10428 && !can_compare_p (TYPE_MODE (inner_type)))
10429 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10430 else
10431 comparison = compare (exp, EQ, EQ);
10432 break;
10433 }
10434
10435 case NE_EXPR:
10436 {
10437 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10438
10439 if (integer_zerop (TREE_OPERAND (exp, 1)))
10440 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10441 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10442 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10443 do_jump
10444 (fold
10445 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10446 fold (build (NE_EXPR, TREE_TYPE (exp),
10447 fold (build1 (REALPART_EXPR,
10448 TREE_TYPE (inner_type),
10449 TREE_OPERAND (exp, 0))),
10450 fold (build1 (REALPART_EXPR,
10451 TREE_TYPE (inner_type),
10452 TREE_OPERAND (exp, 1))))),
10453 fold (build (NE_EXPR, TREE_TYPE (exp),
10454 fold (build1 (IMAGPART_EXPR,
10455 TREE_TYPE (inner_type),
10456 TREE_OPERAND (exp, 0))),
10457 fold (build1 (IMAGPART_EXPR,
10458 TREE_TYPE (inner_type),
10459 TREE_OPERAND (exp, 1))))))),
10460 if_false_label, if_true_label);
10461 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10462 && !can_compare_p (TYPE_MODE (inner_type)))
10463 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10464 else
10465 comparison = compare (exp, NE, NE);
10466 break;
10467 }
10468
10469 case LT_EXPR:
10470 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10471 == MODE_INT)
10472 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10473 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10474 else
10475 comparison = compare (exp, LT, LTU);
10476 break;
10477
10478 case LE_EXPR:
10479 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10480 == MODE_INT)
10481 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10482 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10483 else
10484 comparison = compare (exp, LE, LEU);
10485 break;
10486
10487 case GT_EXPR:
10488 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10489 == MODE_INT)
10490 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10491 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10492 else
10493 comparison = compare (exp, GT, GTU);
10494 break;
10495
10496 case GE_EXPR:
10497 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10498 == MODE_INT)
10499 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10500 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10501 else
10502 comparison = compare (exp, GE, GEU);
10503 break;
10504
10505 default:
10506 normal:
10507 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10508 #if 0
10509 /* This is no longer needed and causes poor code, since it makes
10510 comparisons and tests of non-SI objects use different code
10511 sequences. */
10512 /* Copy to register to avoid generating bad insns by cse
10513 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10514 if (!cse_not_expected && GET_CODE (temp) == MEM)
10515 temp = copy_to_reg (temp);
10516 #endif
10517 do_pending_stack_adjust ();
10518 if (GET_CODE (temp) == CONST_INT)
10519 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10520 else if (GET_CODE (temp) == LABEL_REF)
10521 comparison = const_true_rtx;
10522 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10523 && !can_compare_p (GET_MODE (temp)))
10524 /* Note swapping the labels gives us not-equal. */
10525 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10526 else if (GET_MODE (temp) != VOIDmode)
10527 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10528 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10529 GET_MODE (temp), NULL_RTX, 0);
10530 else
10531 abort ();
10532 }
10533
10534 /* Do any postincrements in the expression that was tested. */
10535 emit_queue ();
10536
10537 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10538 straight into a conditional jump instruction as the jump condition.
10539 Otherwise, all the work has been done already. */
10540
10541 if (comparison == const_true_rtx)
10542 {
10543 if (if_true_label)
10544 emit_jump (if_true_label);
10545 }
10546 else if (comparison == const0_rtx)
10547 {
10548 if (if_false_label)
10549 emit_jump (if_false_label);
10550 }
10551 else if (comparison)
10552 do_jump_for_compare (comparison, if_false_label, if_true_label);
10553
10554 if (drop_through_label)
10555 {
10556 /* If do_jump produces code that might be jumped around,
10557 do any stack adjusts from that code, before the place
10558 where control merges in. */
10559 do_pending_stack_adjust ();
10560 emit_label (drop_through_label);
10561 }
10562 }
10563 \f
10564 /* Given a comparison expression EXP for values too wide to be compared
10565 with one insn, test the comparison and jump to the appropriate label.
10566 The code of EXP is ignored; we always test GT if SWAP is 0,
10567 and LT if SWAP is 1. */
10568
10569 static void
10570 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10571 tree exp;
10572 int swap;
10573 rtx if_false_label, if_true_label;
10574 {
10575 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10576 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10577 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10578 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10579 rtx drop_through_label = 0;
10580 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10581 int i;
10582
10583 if (! if_true_label || ! if_false_label)
10584 drop_through_label = gen_label_rtx ();
10585 if (! if_true_label)
10586 if_true_label = drop_through_label;
10587 if (! if_false_label)
10588 if_false_label = drop_through_label;
10589
10590 /* Compare a word at a time, high order first. */
10591 for (i = 0; i < nwords; i++)
10592 {
10593 rtx comp;
10594 rtx op0_word, op1_word;
10595
10596 if (WORDS_BIG_ENDIAN)
10597 {
10598 op0_word = operand_subword_force (op0, i, mode);
10599 op1_word = operand_subword_force (op1, i, mode);
10600 }
10601 else
10602 {
10603 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10604 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10605 }
10606
10607 /* All but the high-order word must be compared as unsigned. */
10608 comp = compare_from_rtx (op0_word, op1_word,
10609 (unsignedp || i > 0) ? GTU : GT,
10610 unsignedp, word_mode, NULL_RTX, 0);
10611 if (comp == const_true_rtx)
10612 emit_jump (if_true_label);
10613 else if (comp != const0_rtx)
10614 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10615
10616 /* Consider the lower words only if the current words are equal. */
10617 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10618 NULL_RTX, 0);
10619 if (comp == const_true_rtx)
10620 emit_jump (if_false_label);
10621 else if (comp != const0_rtx)
10622 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10623 }
10624
10625 if (if_false_label)
10626 emit_jump (if_false_label);
10627 if (drop_through_label)
10628 emit_label (drop_through_label);
10629 }
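/* A minimal C model of the loop above, assuming the operands are already
   split into NWORDS words with W0[0]/W1[0] the most significant
   (a sketch, not used by the compiler):  */
#if 0
static int
wide_gt (w0, w1, nwords, signedp)
     unsigned long *w0, *w1;
     int nwords, signedp;
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      /* Only the high-order word is compared as signed.  */
      if (i == 0 && signedp
	  ? (long) w0[0] > (long) w1[0]
	  : w0[i] > w1[i])
	return 1;		/* take the "true" label */
      if (w0[i] != w1[i])
	return 0;		/* W0 < W1 here: take the "false" label */
      /* Words equal: go on to the next lower word.  */
    }
  return 0;			/* all words equal: not greater */
}
#endif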
10630
10631 /* Compare OP0 with OP1, word at a time, in mode MODE.
10632 UNSIGNEDP says to do unsigned comparison.
10633 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10634
10635 void
10636 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10637 enum machine_mode mode;
10638 int unsignedp;
10639 rtx op0, op1;
10640 rtx if_false_label, if_true_label;
10641 {
10642 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10643 rtx drop_through_label = 0;
10644 int i;
10645
10646 if (! if_true_label || ! if_false_label)
10647 drop_through_label = gen_label_rtx ();
10648 if (! if_true_label)
10649 if_true_label = drop_through_label;
10650 if (! if_false_label)
10651 if_false_label = drop_through_label;
10652
10653 /* Compare a word at a time, high order first. */
10654 for (i = 0; i < nwords; i++)
10655 {
10656 rtx comp;
10657 rtx op0_word, op1_word;
10658
10659 if (WORDS_BIG_ENDIAN)
10660 {
10661 op0_word = operand_subword_force (op0, i, mode);
10662 op1_word = operand_subword_force (op1, i, mode);
10663 }
10664 else
10665 {
10666 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10667 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10668 }
10669
10670 /* All but the high-order word must be compared as unsigned. */
10671 comp = compare_from_rtx (op0_word, op1_word,
10672 (unsignedp || i > 0) ? GTU : GT,
10673 unsignedp, word_mode, NULL_RTX, 0);
10674 if (comp == const_true_rtx)
10675 emit_jump (if_true_label);
10676 else if (comp != const0_rtx)
10677 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10678
10679 /* Consider the lower words only if the current words are equal. */
10680 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10681 NULL_RTX, 0);
10682 if (comp == const_true_rtx)
10683 emit_jump (if_false_label);
10684 else if (comp != const0_rtx)
10685 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10686 }
10687
10688 if (if_false_label)
10689 emit_jump (if_false_label);
10690 if (drop_through_label)
10691 emit_label (drop_through_label);
10692 }
10693
10694 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10695 with one insn, test the comparison and jump to the appropriate label. */
10696
10697 static void
10698 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10699 tree exp;
10700 rtx if_false_label, if_true_label;
10701 {
10702 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10703 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10704 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10705 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10706 int i;
10707 rtx drop_through_label = 0;
10708
10709 if (! if_false_label)
10710 drop_through_label = if_false_label = gen_label_rtx ();
10711
10712 for (i = 0; i < nwords; i++)
10713 {
10714 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10715 operand_subword_force (op1, i, mode),
10716 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10717 word_mode, NULL_RTX, 0);
10718 if (comp == const0_rtx)
10719 emit_jump (if_false_label); /* Words known unequal.  */
10720 else if (comp != const_true_rtx)
10721 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10722 }
10723
10724 if (if_true_label)
10725 emit_jump (if_true_label);
10726 if (drop_through_label)
10727 emit_label (drop_through_label);
10728 }
10729 \f
10730 /* Jump according to whether OP0 is 0.
10731 We assume that OP0 has an integer mode that is too wide
10732 for the available compare insns. */
10733
10734 static void
10735 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10736 rtx op0;
10737 rtx if_false_label, if_true_label;
10738 {
10739 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10740 int i;
10741 rtx drop_through_label = 0;
10742
10743 if (! if_false_label)
10744 drop_through_label = if_false_label = gen_label_rtx ();
10745
10746 for (i = 0; i < nwords; i++)
10747 {
10748 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10749 GET_MODE (op0)),
10750 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10751 if (comp == const0_rtx)
10752 emit_jump (if_false_label); /* Word known nonzero.  */
10753 else if (comp != const_true_rtx)
10754 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10755 }
10756
10757 if (if_true_label)
10758 emit_jump (if_true_label);
10759 if (drop_through_label)
10760 emit_label (drop_through_label);
10761 }
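/* The test above, modeled in C: OP0 is zero exactly when every one of
   its words is zero (sketch only):  */
#if 0
static int
wide_is_zero (w, nwords)
     unsigned long *w;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    if (w[i] != 0)
      return 0;			/* a nonzero word: take the "false" label */
  return 1;			/* all words zero: take the "true" label  */
}
#endif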
10762
10763 /* Given a comparison expression in rtl form, output conditional branches to
10764 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10765
10766 static void
10767 do_jump_for_compare (comparison, if_false_label, if_true_label)
10768 rtx comparison, if_false_label, if_true_label;
10769 {
10770 if (if_true_label)
10771 {
10772 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10773 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10774 else
10775 abort ();
10776
10777 if (if_false_label)
10778 emit_jump (if_false_label);
10779 }
10780 else if (if_false_label)
10781 {
10782 rtx insn;
10783 rtx prev = get_last_insn ();
10784 rtx branch = 0;
10785
10786 /* Output the branch with the opposite condition. Then try to invert
10787 what is generated. If more than one insn is a branch, or if the
10788 branch is not the last insn written, abort. If we can't invert
10789 the branch, make a true label, redirect this jump to that,
10790 emit a jump to the false label and define the true label. */
10791
10792 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10793 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10794 else
10795 abort ();
10796
10797 /* Here we get the first insn that was just emitted. It used to be the
10798 case that, on some machines, emitting the branch would discard
10799 the previous compare insn and emit a replacement. This isn't
10800 done anymore, but abort if we see that PREV is deleted. */
10801
10802 if (prev == 0)
10803 insn = get_insns ();
10804 else if (INSN_DELETED_P (prev))
10805 abort ();
10806 else
10807 insn = NEXT_INSN (prev);
10808
10809 for (; insn; insn = NEXT_INSN (insn))
10810 if (GET_CODE (insn) == JUMP_INSN)
10811 {
10812 if (branch)
10813 abort ();
10814 branch = insn;
10815 }
10816
10817 if (branch != get_last_insn ())
10818 abort ();
10819
10820 JUMP_LABEL (branch) = if_false_label;
10821 if (! invert_jump (branch, if_false_label))
10822 {
10823 if_true_label = gen_label_rtx ();
10824 redirect_jump (branch, if_true_label);
10825 emit_jump (if_false_label);
10826 emit_label (if_true_label);
10827 }
10828 }
10829 }
10830 \f
10831 /* Generate code for a comparison expression EXP
10832 (including code to compute the values to be compared)
10833 and set (CC0) according to the result.
10834 SIGNED_CODE should be the rtx operation for this comparison for
10835 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10836
10837 We force a stack adjustment unless there are currently
10838 things pushed on the stack that aren't yet used. */
10839
10840 static rtx
10841 compare (exp, signed_code, unsigned_code)
10842 register tree exp;
10843 enum rtx_code signed_code, unsigned_code;
10844 {
10845 register rtx op0
10846 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10847 register rtx op1
10848 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10849 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10850 register enum machine_mode mode = TYPE_MODE (type);
10851 int unsignedp = TREE_UNSIGNED (type);
10852 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10853
10854 #ifdef HAVE_canonicalize_funcptr_for_compare
10855 /* If function pointers need to be "canonicalized" before they can
10856 be reliably compared, then canonicalize them. */
10857 if (HAVE_canonicalize_funcptr_for_compare
10858 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10859 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10860 == FUNCTION_TYPE))
10861 {
10862 rtx new_op0 = gen_reg_rtx (mode);
10863
10864 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10865 op0 = new_op0;
10866 }
10867
10868 if (HAVE_canonicalize_funcptr_for_compare
10869 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10870 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10871 == FUNCTION_TYPE))
10872 {
10873 rtx new_op1 = gen_reg_rtx (mode);
10874
10875 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10876 op1 = new_op1;
10877 }
10878 #endif
10879
10880 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10881 ((mode == BLKmode)
10882 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10883 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10884 }
10885
10886 /* Like compare but expects the values to compare as two rtx's.
10887 The decision as to signed or unsigned comparison must be made by the caller.
10888
10889 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10890 compared.
10891
10892 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10893 size of MODE should be used. */
10894
10895 rtx
10896 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10897 register rtx op0, op1;
10898 enum rtx_code code;
10899 int unsignedp;
10900 enum machine_mode mode;
10901 rtx size;
10902 int align;
10903 {
10904 rtx tem;
10905
10906 /* If one operand is constant, make it the second one. Only do this
10907 if the other operand is not constant as well. */
10908
10909 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10910 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10911 {
10912 tem = op0;
10913 op0 = op1;
10914 op1 = tem;
10915 code = swap_condition (code);
10916 }
10917
10918 if (flag_force_mem)
10919 {
10920 op0 = force_not_mem (op0);
10921 op1 = force_not_mem (op1);
10922 }
10923
10924 do_pending_stack_adjust ();
10925
10926 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10927 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10928 return tem;
10929
10930 #if 0
10931 /* There's no need to do this now that combine.c can eliminate lots of
10932 sign extensions. This can be less efficient in certain cases on other
10933 machines. */
10934
10935 /* If this is a signed equality comparison, we can do it as an
10936 unsigned comparison since zero-extension is cheaper than sign
10937 extension and comparisons with zero are done as unsigned. This is
10938 the case even on machines that can do fast sign extension, since
10939 zero-extension is easier to combine with other operations than
10940 sign-extension is. If we are comparing against a constant, we must
10941 convert it to what it would look like unsigned. */
10942 if ((code == EQ || code == NE) && ! unsignedp
10943 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10944 {
10945 if (GET_CODE (op1) == CONST_INT
10946 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10947 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10948 unsignedp = 1;
10949 }
10950 #endif
10951
10952 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10953
10954 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10955 }
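/* On a cc0 machine, the net effect of the call above together with the
   branch the caller will emit is an insn pair of this shape (register
   numbers and the label are illustrative):

       (set (cc0) (compare (reg:SI 100) (reg:SI 101)))
       (set (pc) (if_then_else (gt (cc0) (const_int 0))
			       (label_ref 23) (pc)))

   compare_from_rtx emits the first insn and returns the rtx
   (gt (cc0) (const_int 0)) to be used as the branch condition.  */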
10956 \f
10957 /* Generate code to calculate EXP using a store-flag instruction
10958 and return an rtx for the result. EXP is either a comparison
10959 or a TRUTH_NOT_EXPR whose operand is a comparison.
10960
10961 If TARGET is nonzero, store the result there if convenient.
10962
10963 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10964 cheap.
10965
10966 Return zero if there is no suitable set-flag instruction
10967 available on this machine.
10968
10969 Once expand_expr has been called on the arguments of the comparison,
10970 we are committed to doing the store flag, since it is not safe to
10971 re-evaluate the expression. We emit the store-flag insn by calling
10972 emit_store_flag, but only expand the arguments if we have a reason
10973 to believe that emit_store_flag will be successful. If we think that
10974 it will, but it isn't, we have to simulate the store-flag with a
10975 set/jump/set sequence. */
10976
10977 static rtx
10978 do_store_flag (exp, target, mode, only_cheap)
10979 tree exp;
10980 rtx target;
10981 enum machine_mode mode;
10982 int only_cheap;
10983 {
10984 enum rtx_code code;
10985 tree arg0, arg1, type;
10986 tree tem;
10987 enum machine_mode operand_mode;
10988 int invert = 0;
10989 int unsignedp;
10990 rtx op0, op1;
10991 enum insn_code icode;
10992 rtx subtarget = target;
10993 rtx result, label, pattern, jump_pat;
10994
10995 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10996 result at the end. We can't simply invert the test since it would
10997 have already been inverted if it were valid. This case occurs for
10998 some floating-point comparisons. */
10999
11000 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11001 invert = 1, exp = TREE_OPERAND (exp, 0);
11002
11003 arg0 = TREE_OPERAND (exp, 0);
11004 arg1 = TREE_OPERAND (exp, 1);
11005 type = TREE_TYPE (arg0);
11006 operand_mode = TYPE_MODE (type);
11007 unsignedp = TREE_UNSIGNED (type);
11008
11009 /* We won't bother with BLKmode store-flag operations because it would mean
11010 passing a lot of information to emit_store_flag. */
11011 if (operand_mode == BLKmode)
11012 return 0;
11013
11014 /* We won't bother with store-flag operations involving function pointers
11015 when function pointers must be canonicalized before comparisons. */
11016 #ifdef HAVE_canonicalize_funcptr_for_compare
11017 if (HAVE_canonicalize_funcptr_for_compare
11018 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11019 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11020 == FUNCTION_TYPE))
11021 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11022 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11023 == FUNCTION_TYPE))))
11024 return 0;
11025 #endif
11026
11027 STRIP_NOPS (arg0);
11028 STRIP_NOPS (arg1);
11029
11030 /* Get the rtx comparison code to use. We know that EXP is a comparison
11031 operation of some type. Some comparisons against 1 and -1 can be
11032 converted to comparisons with zero. Do so here so that the tests
11033 below will be aware that we have a comparison with zero. These
11034 tests will not catch constants in the first operand, but constants
11035 are rarely passed as the first operand. */
11036
11037 switch (TREE_CODE (exp))
11038 {
11039 case EQ_EXPR:
11040 code = EQ;
11041 break;
11042 case NE_EXPR:
11043 code = NE;
11044 break;
11045 case LT_EXPR:
11046 if (integer_onep (arg1))
11047 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11048 else
11049 code = unsignedp ? LTU : LT;
11050 break;
11051 case LE_EXPR:
11052 if (! unsignedp && integer_all_onesp (arg1))
11053 arg1 = integer_zero_node, code = LT;
11054 else
11055 code = unsignedp ? LEU : LE;
11056 break;
11057 case GT_EXPR:
11058 if (! unsignedp && integer_all_onesp (arg1))
11059 arg1 = integer_zero_node, code = GE;
11060 else
11061 code = unsignedp ? GTU : GT;
11062 break;
11063 case GE_EXPR:
11064 if (integer_onep (arg1))
11065 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11066 else
11067 code = unsignedp ? GEU : GE;
11068 break;
11069 default:
11070 abort ();
11071 }
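/* The rewrites above rest on simple identities; a quick sanity sketch
   (X and U are illustrative signed/unsigned values, and assert comes
   from <assert.h>):  */
#if 0
      assert ((x < 1) == (x <= 0));	/* LT_EXPR against 1   */
      assert ((x <= -1) == (x < 0));	/* LE_EXPR against -1  */
      assert ((x > -1) == (x >= 0));	/* GT_EXPR against -1  */
      assert ((x >= 1) == (x > 0));	/* GE_EXPR against 1   */
      assert ((u < 1) == (u == 0));	/* LTU against 1       */
      assert ((u >= 1) == (u > 0));	/* GEU against 1       */
#endif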
11072
11073 /* Put a constant second. */
11074 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11075 {
11076 tem = arg0; arg0 = arg1; arg1 = tem;
11077 code = swap_condition (code);
11078 }
11079
11080 /* If this is an equality or inequality test of a single bit, we can
11081 do this by shifting the bit being tested to the low-order bit and
11082 masking the result with the constant 1. If the condition was EQ,
11083 we xor it with 1. This does not require an scc insn and is faster
11084 than an scc insn even if we have it. */
11085
11086 if ((code == NE || code == EQ)
11087 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11088 && integer_pow2p (TREE_OPERAND (arg0, 1))
11089 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11090 {
11091 tree inner = TREE_OPERAND (arg0, 0);
11092 HOST_WIDE_INT tem;
11093 int bitnum;
11094 int ops_unsignedp;
11095
11096 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11097 NULL_RTX, VOIDmode, 0));
11098 /* In this case, immed_double_const will sign extend the value to make
11099 it look the same on the host and target. We must remove the
11100 sign-extension before calling exact_log2, since exact_log2 will
11101 fail for negative values. */
11102 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11103 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11104 /* We don't use the obvious constant shift to generate the mask,
11105 because that generates compiler warnings when BITS_PER_WORD is
11106 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11107 code is unreachable in that case. */
11108 tem = tem & GET_MODE_MASK (word_mode);
11109 bitnum = exact_log2 (tem);
11110
11111 /* If INNER is a right shift of a constant and it plus BITNUM does
11112 not overflow, adjust BITNUM and INNER. */
11113
11114 if (TREE_CODE (inner) == RSHIFT_EXPR
11115 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11116 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11117 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11118 < TYPE_PRECISION (type)))
11119 {
11120 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11121 inner = TREE_OPERAND (inner, 0);
11122 }
11123
11124 /* If we are going to be able to omit the AND below, we must do our
11125 operations as unsigned. If we must use the AND, we have a choice.
11126 Normally unsigned is faster, but for some machines signed is. */
11127 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11128 #ifdef LOAD_EXTEND_OP
11129 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11130 #else
11131 : 1
11132 #endif
11133 );
11134
11135 if (subtarget == 0 || GET_CODE (subtarget) != REG
11136 || GET_MODE (subtarget) != operand_mode
11137 || ! safe_from_p (subtarget, inner))
11138 subtarget = 0;
11139
11140 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11141
11142 if (bitnum != 0)
11143 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11144 size_int (bitnum), subtarget, ops_unsignedp);
11145
11146 if (GET_MODE (op0) != mode)
11147 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11148
11149 if ((code == EQ && ! invert) || (code == NE && invert))
11150 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11151 ops_unsignedp, OPTAB_LIB_WIDEN);
11152
11153 /* Put the AND last so it can combine with more things. */
11154 if (bitnum != TYPE_PRECISION (type) - 1)
11155 op0 = expand_and (op0, const1_rtx, subtarget);
11156
11157 return op0;
11158 }
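/* At the C level the transformation above computes, for a single-bit
   mask (1 << N), the NE test as (X >> N) & 1 and the EQ test as that
   value xored with 1; when N is the sign bit the final AND can be
   omitted.  With illustrative unsigned X and bit number N:  */
#if 0
      ne_result = (x >> n) & 1;		/* (x & (1 << n)) != 0 */
      eq_result = ((x >> n) & 1) ^ 1;	/* (x & (1 << n)) == 0 */
#endif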
11159
11160 /* Now see if we are likely to be able to do this. Return if not. */
11161 if (! can_compare_p (operand_mode))
11162 return 0;
11163 icode = setcc_gen_code[(int) code];
11164 if (icode == CODE_FOR_nothing
11165 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11166 {
11167 /* We can only do this if it is one of the special cases that
11168 can be handled without an scc insn. */
11169 if ((code == LT && integer_zerop (arg1))
11170 || (! only_cheap && code == GE && integer_zerop (arg1)))
11171 ;
11172 else if (BRANCH_COST >= 0
11173 && ! only_cheap && (code == NE || code == EQ)
11174 && TREE_CODE (type) != REAL_TYPE
11175 && ((abs_optab->handlers[(int) operand_mode].insn_code
11176 != CODE_FOR_nothing)
11177 || (ffs_optab->handlers[(int) operand_mode].insn_code
11178 != CODE_FOR_nothing)))
11179 ;
11180 else
11181 return 0;
11182 }
11183
11184 preexpand_calls (exp);
11185 if (subtarget == 0 || GET_CODE (subtarget) != REG
11186 || GET_MODE (subtarget) != operand_mode
11187 || ! safe_from_p (subtarget, arg1))
11188 subtarget = 0;
11189
11190 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11191 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11192
11193 if (target == 0)
11194 target = gen_reg_rtx (mode);
11195
11196 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11197 because, if emit_store_flag does anything, it will succeed and
11198 OP0 and OP1 will not be used subsequently. */
11199
11200 result = emit_store_flag (target, code,
11201 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11202 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11203 operand_mode, unsignedp, 1);
11204
11205 if (result)
11206 {
11207 if (invert)
11208 result = expand_binop (mode, xor_optab, result, const1_rtx,
11209 result, 0, OPTAB_LIB_WIDEN);
11210 return result;
11211 }
11212
11213 /* If this failed, we have to do this with set/compare/jump/set code. */
11214 if (GET_CODE (target) != REG
11215 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11216 target = gen_reg_rtx (GET_MODE (target));
11217
11218 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11219 result = compare_from_rtx (op0, op1, code, unsignedp,
11220 operand_mode, NULL_RTX, 0);
11221 if (GET_CODE (result) == CONST_INT)
11222 return (((result == const0_rtx && ! invert)
11223 || (result != const0_rtx && invert))
11224 ? const0_rtx : const1_rtx);
11225
11226 label = gen_label_rtx ();
11227 if (bcc_gen_fctn[(int) code] == 0)
11228 abort ();
11229
11230 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11231 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11232 emit_label (label);
11233
11234 return target;
11235 }
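/* Shape of the set/jump/set fallback just emitted, in C terms, shown
   for CODE == GT and INVERT == 0 (a sketch):  */
#if 0
static int
store_flag_fallback (op0, op1)
     int op0, op1;
{
  int target = 1;		/* emit_move_insn (target, const1_rtx) */
  if (op0 > op1)		/* the branch from bcc_gen_fctn[GT]     */
    goto done;
  target = 0;			/* emit_move_insn (target, const0_rtx) */
 done:
  return target;
}
#endif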
11236 \f
11237 /* Generate a tablejump instruction (used for switch statements). */
11238
11239 #ifdef HAVE_tablejump
11240
11241 /* INDEX is the value being switched on, with the lowest value
11242 in the table already subtracted.
11243 MODE is its expected mode (needed if INDEX is constant).
11244 RANGE is the length of the jump table.
11245 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11246
11247 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11248 index value is out of range. */
11249
11250 void
11251 do_tablejump (index, mode, range, table_label, default_label)
11252 rtx index, range, table_label, default_label;
11253 enum machine_mode mode;
11254 {
11255 register rtx temp, vector;
11256
11257 /* Do an unsigned comparison (in the proper mode) between the index
11258 expression and the value which represents the length of the range.
11259 Since we just finished subtracting the lower bound of the range
11260 from the index expression, this comparison allows us to simultaneously
11261 check that the original index expression value is both greater than
11262 or equal to the minimum value of the range and less than or equal to
11263 the maximum value of the range. */
11264
11265 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11266 emit_jump_insn (gen_bgtu (default_label));
11267
11268 /* If index is in range, it must fit in Pmode.
11269 Convert to Pmode so we can index with it. */
11270 if (mode != Pmode)
11271 index = convert_to_mode (Pmode, index, 1);
11272
11273 /* Don't let a MEM slip through, because then the INDEX that comes
11274 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11275 and break_out_memory_refs will go to work on it and mess it up. */
11276 #ifdef PIC_CASE_VECTOR_ADDRESS
11277 if (flag_pic && GET_CODE (index) != REG)
11278 index = copy_to_mode_reg (Pmode, index);
11279 #endif
11280
11281 /* If flag_force_addr were to affect this address
11282 it could interfere with the tricky assumptions made
11283 about addresses that contain label-refs,
11284 which may be valid only very near the tablejump itself. */
11285 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11286 GET_MODE_SIZE, because this indicates how large insns are. The other
11287 uses should all be Pmode, because they are addresses. This code
11288 could fail if addresses and insns are not the same size. */
11289 index = gen_rtx (PLUS, Pmode,
11290 gen_rtx (MULT, Pmode, index,
11291 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11292 gen_rtx (LABEL_REF, Pmode, table_label));
11293 #ifdef PIC_CASE_VECTOR_ADDRESS
11294 if (flag_pic)
11295 index = PIC_CASE_VECTOR_ADDRESS (index);
11296 else
11297 #endif
11298 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11299 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11300 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11301 RTX_UNCHANGING_P (vector) = 1;
11302 convert_move (temp, vector, 0);
11303
11304 emit_jump_insn (gen_tablejump (temp, table_label));
11305
11306 #ifndef CASE_VECTOR_PC_RELATIVE
11307 /* If we are generating PIC code or if the table is PC-relative, the
11308 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11309 if (! flag_pic)
11310 emit_barrier ();
11311 #endif
11312 }
11313
11314 #endif /* HAVE_tablejump */
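/* A C-level model of the dispatch emitted by do_tablejump, using the
   GNU C computed-goto extension; the four-entry table and the label
   names are illustrative only:  */
#if 0
static int
tablejump_model (index)
     unsigned int index;	/* lowest case value already subtracted */
{
  static void *table[4] = { &&case0, &&case1, &&case2, &&case3 };

  /* One unsigned comparison checks both ends of the range at once.  */
  if (index > 3)
    goto deflt;
  goto *table[index];

 case0: return 0;
 case1: return 1;
 case2: return 2;
 case3: return 3;
 deflt: return -1;
}
#endif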
11315
11316
11317 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11318 to that value is on the top of the stack. The resulting type is TYPE, and
11319 the source declaration is DECL. */
11320
11321 void
11322 bc_load_memory (type, decl)
11323 tree type, decl;
11324 {
11325 enum bytecode_opcode opcode;
11326
11327
11328 /* Bit fields are special. We only know about signed and
11329 unsigned ints, and enums. The latter are treated as
11330 signed integers. */
11331
11332 if (DECL_BIT_FIELD (decl))
11333 if (TREE_CODE (type) == ENUMERAL_TYPE
11334 || TREE_CODE (type) == INTEGER_TYPE)
11335 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11336 else
11337 abort ();
11338 else
11339 /* See corresponding comment in bc_store_memory(). */
11340 if (TYPE_MODE (type) == BLKmode
11341 || TYPE_MODE (type) == VOIDmode)
11342 return;
11343 else
11344 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11345
11346 if (opcode == neverneverland)
11347 abort ();
11348
11349 bc_emit_bytecode (opcode);
11350
11351 #ifdef DEBUG_PRINT_CODE
11352 fputc ('\n', stderr);
11353 #endif
11354 }
11355
11356
11357 /* Store the contents of the second stack slot to the address in the
11358 top stack slot. DECL is the declaration of the destination and is used
11359 to determine whether we're dealing with a bitfield. */
11360
11361 void
11362 bc_store_memory (type, decl)
11363 tree type, decl;
11364 {
11365 enum bytecode_opcode opcode;
11366
11367
11368 if (DECL_BIT_FIELD (decl))
11369 {
11370 if (TREE_CODE (type) == ENUMERAL_TYPE
11371 || TREE_CODE (type) == INTEGER_TYPE)
11372 opcode = sstoreBI;
11373 else
11374 abort ();
11375 }
11376 else
11377 if (TYPE_MODE (type) == BLKmode)
11378 {
11379 /* Copy structure. This expands to a block copy instruction, storeBLK.
11380 In addition to the arguments expected by the other store instructions,
11381 it also expects a type size (SImode) on top of the stack, which is the
11382 structure size in size units (usually bytes). The first two arguments
11383 are already on the stack, so we just put the size on level 1. For some
11384 other languages the size may be variable; this is why we don't encode
11385 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11386
11387 bc_expand_expr (TYPE_SIZE (type));
11388 opcode = storeBLK;
11389 }
11390 else
11391 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11392
11393 if (opcode == neverneverland)
11394 abort ();
11395
11396 bc_emit_bytecode (opcode);
11397
11398 #ifdef DEBUG_PRINT_CODE
11399 fputc ('\n', stderr);
11400 #endif
11401 }
11402
11403
11404 /* Allocate local stack space sufficient to hold a value of the given
11405 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11406 integral power of 2. A special case is locals of type VOID, which
11407 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
11408 remapped into the corresponding attribute of SI. */
11409
11410 rtx
11411 bc_allocate_local (size, alignment)
11412 int size, alignment;
11413 {
11414 rtx retval;
11415 int byte_alignment;
11416
11417 if (size < 0)
11418 abort ();
11419
11420 /* Normalize size and alignment */
11421 if (!size)
11422 size = UNITS_PER_WORD;
11423
11424 if (alignment < BITS_PER_UNIT)
11425 byte_alignment = 1 << (INT_ALIGN - 1);
11426 else
11427 /* Align */
11428 byte_alignment = alignment / BITS_PER_UNIT;
11429
11430 if (local_vars_size & (byte_alignment - 1))
11431 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11432
11433 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11434 local_vars_size += size;
11435
11436 return retval;
11437 }
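/* The rounding step above is the usual round-up-to-a-multiple trick;
   for any power-of-two ALIGN the two forms below are equivalent
   (sketch):  */
#if 0
static int
round_up (size, align)
     int size, align;		/* ALIGN must be a power of 2 */
{
  if (size & (align - 1))
    size += align - (size & (align - 1));	/* the form used above */
  /* Equivalently: size = (size + align - 1) & ~(align - 1);  */
  return size;
}
#endif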
11438
11439
11440 /* Allocate a variable-sized local array. Variable-sized arrays are
11441 represented by a pointer to the block of memory where they are stored. */
11442
11443 rtx
11444 bc_allocate_variable_array (size)
11445 tree size;
11446 {
11447 rtx retval;
11448 const int ptralign = (1 << (PTR_ALIGN - 1));
11449
11450 /* Align pointer */
11451 if (local_vars_size & ptralign)
11452 local_vars_size += ptralign - (local_vars_size & ptralign);
11453
11454 /* Note down local space needed: pointer to block; also return
11455 dummy rtx */
11456
11457 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11458 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11459 return retval;
11460 }
11461
11462
11463 /* Push the machine address for the given external variable offset. */
11464
11465 void
11466 bc_load_externaddr (externaddr)
11467 rtx externaddr;
11468 {
11469 bc_emit_bytecode (constP);
11470 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11471 BYTECODE_BC_LABEL (externaddr)->offset);
11472
11473 #ifdef DEBUG_PRINT_CODE
11474 fputc ('\n', stderr);
11475 #endif
11476 }
11477
11478
11479 /* Like above, but expects an IDENTIFIER. */
11480
11481 void
11482 bc_load_externaddr_id (id, offset)
11483 tree id;
11484 int offset;
11485 {
11486 if (!IDENTIFIER_POINTER (id))
11487 abort ();
11488
11489 bc_emit_bytecode (constP);
11490 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11491
11492 #ifdef DEBUG_PRINT_CODE
11493 fputc ('\n', stderr);
11494 #endif
11495 }
11496
11497
11498 /* Push the machine address for the given local variable offset. */
11499
11500 void
11501 bc_load_localaddr (localaddr)
11502 rtx localaddr;
11503 {
11504 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11505 }
11506
11507
11508 /* Push the machine address for the given parameter offset.
11509 NOTE: offset is in bits. */
11510
11511 void
11512 bc_load_parmaddr (parmaddr)
11513 rtx parmaddr;
11514 {
11515 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11516 / BITS_PER_UNIT));
11517 }
11518
11519
11520 /* Convert a[i] into *(a + i). */
11521
11522 tree
11523 bc_canonicalize_array_ref (exp)
11524 tree exp;
11525 {
11526 tree type = TREE_TYPE (exp);
11527 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11528 TREE_OPERAND (exp, 0));
11529 tree index = TREE_OPERAND (exp, 1);
11530
11531
11532 /* Convert the integer argument to a type the same size as a pointer
11533 so the multiply won't overflow spuriously. */
11534
11535 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11536 index = convert (type_for_size (POINTER_SIZE, 0), index);
11537
11538 /* The array address isn't volatile even if the array is.
11539 (Of course this isn't terribly relevant since the bytecode
11540 translator treats nearly everything as volatile anyway.) */
11541 TREE_THIS_VOLATILE (array_adr) = 0;
11542
11543 return build1 (INDIRECT_REF, type,
11544 fold (build (PLUS_EXPR,
11545 TYPE_POINTER_TO (type),
11546 array_adr,
11547 fold (build (MULT_EXPR,
11548 TYPE_POINTER_TO (type),
11549 index,
11550 size_in_bytes (type))))));
11551 }
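/* For example, with int A[10] the rewrite above turns A[I] into the
   equivalent of the following, where the index is first widened to a
   pointer-sized type (long stands in for that type here):  */
#if 0
static int
array_ref_model (a, i)
     int *a;
     int i;
{
  return *(int *) ((char *) a + (long) i * sizeof (int));
}
#endif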
11552
11553
11554 /* Load the address of the component referenced by the given
11555 COMPONENT_REF expression.
11556
11557 Returns innermost lvalue. */
11558
11559 tree
11560 bc_expand_component_address (exp)
11561 tree exp;
11562 {
11563 tree tem, chain;
11564 enum machine_mode mode;
11565 int bitpos = 0;
11566 HOST_WIDE_INT SIval;
11567
11568
11569 tem = TREE_OPERAND (exp, 1);
11570 mode = DECL_MODE (tem);
11571
11572
11573 /* Compute cumulative bit offset for nested component refs
11574 and array refs, and find the ultimate containing object. */
11575
11576 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11577 {
11578 if (TREE_CODE (tem) == COMPONENT_REF)
11579 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11580 else
11581 if (TREE_CODE (tem) == ARRAY_REF
11582 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11583 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11584
11585 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11586 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11587 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11588 else
11589 break;
11590 }
11591
11592 bc_expand_expr (tem);
11593
11594
11595 /* For bit fields, also push their offset and size.  */
11596 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11597 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
11598 else
11599 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
11600 bc_emit_instruction (addconstPSI, SIval);
11601
11602 return (TREE_OPERAND (exp, 1));
11603 }
11604
11605
11606 /* Emit code to push two SI constants */
11607
11608 void
11609 bc_push_offset_and_size (offset, size)
11610 HOST_WIDE_INT offset, size;
11611 {
11612 bc_emit_instruction (constSI, offset);
11613 bc_emit_instruction (constSI, size);
11614 }
11615
11616
11617 /* Emit byte code to push the address of the given lvalue expression to
11618 the stack. If it's a bit field, we also push offset and size info.
11619
11620 Returns innermost component, which allows us to determine not only
11621 its type, but also whether it's a bitfield. */
11622
11623 tree
11624 bc_expand_address (exp)
11625 tree exp;
11626 {
11627 /* Safeguard */
11628 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11629 return (exp);
11630
11631
11632 switch (TREE_CODE (exp))
11633 {
11634 case ARRAY_REF:
11635
11636 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11637
11638 case COMPONENT_REF:
11639
11640 return (bc_expand_component_address (exp));
11641
11642 case INDIRECT_REF:
11643
11644 bc_expand_expr (TREE_OPERAND (exp, 0));
11645
11646 /* For variable-sized types: retrieve pointer. Sometimes the
11647 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11648 also make sure we have an operand, just in case... */
11649
11650 if (TREE_OPERAND (exp, 0)
11651 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11652 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11653 bc_emit_instruction (loadP);
11654
11655 /* If packed, also return offset and size */
11656 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11657
11658 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11659 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11660
11661 return (TREE_OPERAND (exp, 0));
11662
11663 case FUNCTION_DECL:
11664
11665 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11666 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11667 break;
11668
11669 case PARM_DECL:
11670
11671 bc_load_parmaddr (DECL_RTL (exp));
11672
11673 /* For variable-sized types: retrieve pointer */
11674 if (TYPE_SIZE (TREE_TYPE (exp))
11675 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11676 bc_emit_instruction (loadP);
11677
11678 /* If packed, also return offset and size */
11679 if (DECL_BIT_FIELD (exp))
11680 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11681 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11682
11683 break;
11684
11685 case RESULT_DECL:
11686
11687 bc_emit_instruction (returnP);
11688 break;
11689
11690 case VAR_DECL:
11691
11692 #if 0
11693 if (BYTECODE_LABEL (DECL_RTL (exp)))
11694 bc_load_externaddr (DECL_RTL (exp));
11695 #endif
11696
11697 if (DECL_EXTERNAL (exp))
11698 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11699 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11700 else
11701 bc_load_localaddr (DECL_RTL (exp));
11702
11703 /* For variable-sized types: retrieve pointer */
11704 if (TYPE_SIZE (TREE_TYPE (exp))
11705 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11706 bc_emit_instruction (loadP);
11707
11708 /* If packed, also return offset and size */
11709 if (DECL_BIT_FIELD (exp))
11710 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11711 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11712
11713 break;
11714
11715 case STRING_CST:
11716 {
11717 rtx r;
11718
11719 bc_emit_bytecode (constP);
11720 r = output_constant_def (exp);
11721 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11722
11723 #ifdef DEBUG_PRINT_CODE
11724 fputc ('\n', stderr);
11725 #endif
11726 }
11727 break;
11728
11729 default:
11730
11731 abort ();
11732 break;
11733 }
11734
11735 /* Most lvalues don't have components. */
11736 return (exp);
11737 }
11738
11739
11740 /* Emit a type code to be used by the runtime support in handling
11741 parameter passing. The type code consists of the machine mode
11742 plus the minimal alignment shifted left 8 bits. */
11743
11744 tree
11745 bc_runtime_type_code (type)
11746 tree type;
11747 {
11748 int val;
11749
11750 switch (TREE_CODE (type))
11751 {
11752 case VOID_TYPE:
11753 case INTEGER_TYPE:
11754 case REAL_TYPE:
11755 case COMPLEX_TYPE:
11756 case ENUMERAL_TYPE:
11757 case POINTER_TYPE:
11758 case RECORD_TYPE:
11759
11760 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11761 break;
11762
11763 case ERROR_MARK:
11764
11765 val = 0;
11766 break;
11767
11768 default:
11769
11770 abort ();
11771 }
11772 return build_int_2 (val, 0);
11773 }
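/* The encoding above packs two small integers into one word, assuming
   the machine mode fits in 8 bits; a decoder would unpack it like this
   (sketch):  */
#if 0
static void
decode_runtime_type_code (code, mode, align)
     int code;
     int *mode, *align;
{
  *mode = code & 0xff;		/* the machine mode, in the low 8 bits */
  *align = code >> 8;		/* TYPE_ALIGN, in bits, above it       */
}
#endif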
11774
11775
11776 /* Generate constructor label */
11777
11778 char *
11779 bc_gen_constr_label ()
11780 {
11781 static int label_counter;
11782 static char label[20];
11783
11784 sprintf (label, "*LR%d", label_counter++);
11785
11786 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11787 }
11788
11789
11790 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11791 expand the constructor data as static data, and push a pointer to it.
11792 The pointer is put in the pointer table and is retrieved by a constP
11793 bytecode instruction. We then loop and store each constructor member in
11794 the corresponding component. Finally, we return the original pointer on
11795 the stack. */
11796
11797 void
11798 bc_expand_constructor (constr)
11799 tree constr;
11800 {
11801 char *l;
11802 HOST_WIDE_INT ptroffs;
11803 rtx constr_rtx;
11804
11805
11806 /* Literal constructors are handled as constants, whereas
11807 non-literals are evaluated and stored element by element
11808 into the data segment. */
11809
11810 /* Allocate space in proper segment and push pointer to space on stack.
11811 */
11812
11813 l = bc_gen_constr_label ();
11814
11815 if (TREE_CONSTANT (constr))
11816 {
11817 text_section ();
11818
11819 bc_emit_const_labeldef (l);
11820 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11821 }
11822 else
11823 {
11824 data_section ();
11825
11826 bc_emit_data_labeldef (l);
11827 bc_output_data_constructor (constr);
11828 }
11829
11830
11831 /* Add reference to pointer table and recall pointer to stack;
11832 this code is common for both types of constructors: literals
11833 and non-literals. */
11834
11835 ptroffs = bc_define_pointer (l);
11836 bc_emit_instruction (constP, ptroffs);
11837
11838 /* This is all that has to be done if it's a literal. */
11839 if (TREE_CONSTANT (constr))
11840 return;
11841
11842
11843 /* At this point, we have the pointer to the structure on top of the stack.
11844 Generate sequences of store_memory calls for the constructor. */
11845
11846 /* constructor type is structure */
11847 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11848 {
11849 register tree elt;
11850
11851 /* If the constructor has fewer fields than the structure,
11852 clear the whole structure first. */
11853
11854 if (list_length (CONSTRUCTOR_ELTS (constr))
11855 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11856 {
11857 bc_emit_instruction (duplicate);
11858 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11859 bc_emit_instruction (clearBLK);
11860 }
11861
11862 /* Store each element of the constructor into the corresponding
11863 field of TARGET. */
11864
11865 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11866 {
11867 register tree field = TREE_PURPOSE (elt);
11868 register enum machine_mode mode;
11869 int bitsize;
11870 int bitpos;
11871 int unsignedp;
11872
11873 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11874 mode = DECL_MODE (field);
11875 unsignedp = TREE_UNSIGNED (field);
11876
11877 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11878
11879 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11880 /* The alignment of TARGET is
11881 at least what its type requires. */
11882 VOIDmode, 0,
11883 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11884 int_size_in_bytes (TREE_TYPE (constr)));
11885 }
11886 }
11887 else
11888
11889 /* Constructor type is array */
11890 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11891 {
11892 register tree elt;
11893 register int i;
11894 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11895 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11896 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11897 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11898
11899 /* If the constructor has fewer elements than the array,
11900 clear the whole array first. */
11901
11902 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11903 {
11904 bc_emit_instruction (duplicate);
11905 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11906 bc_emit_instruction (clearBLK);
11907 }
11908
11909
11910 /* Store each element of the constructor into the corresponding
11911 element of TARGET, determined by counting the elements. */
11912
11913 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11914 elt;
11915 elt = TREE_CHAIN (elt), i++)
11916 {
11917 register enum machine_mode mode;
11918 int bitsize;
11919 int bitpos;
11920 int unsignedp;
11921
11922 mode = TYPE_MODE (elttype);
11923 bitsize = GET_MODE_BITSIZE (mode);
11924 unsignedp = TREE_UNSIGNED (elttype);
11925
11926 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11927 /* * TYPE_SIZE_UNIT (elttype) */ );
11928
11929 bc_store_field (elt, bitsize, bitpos, mode,
11930 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11931 /* The alignment of TARGET is
11932 at least what its type requires. */
11933 VOIDmode, 0,
11934 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11935 int_size_in_bytes (TREE_TYPE (constr)));
11936 }
11937
11938 }
11939 }
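/* A rough C model of the sequence above for a non-constant initializer
   that does not mention every field (the struct and names are
   hypothetical; memset stands in for duplicate/constSI/clearBLK and
   needs <string.h>):  */
#if 0
struct S { int f1, f2, f3; };

static struct S *
constructor_model (p, e1)
     struct S *p;		/* P: storage pushed by constP */
     int e1;
{
  memset (p, 0, sizeof (struct S));	/* clear, since fields are missing */
  p->f1 = e1;				/* one bc_store_field per element  */
  return p;				/* P remains as the value of the
					   whole constructor.  */
}
#endif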
11940
11941
11942 /* Store the value of EXP (an expression tree) into member FIELD of the
11943 structure at the address on top of the stack. The member has type TYPE
11944 and mode MODE, and occupies BITSIZE bits, starting BITPOS bits from the
11945 beginning of the structure.
11946
11947 ALIGN is the alignment that the structure is known to have, measured in bytes.
11948 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11949
11950 void
11951 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11952 value_mode, unsignedp, align, total_size)
11953 int bitsize, bitpos;
11954 enum machine_mode mode;
11955 tree field, exp, type;
11956 enum machine_mode value_mode;
11957 int unsignedp;
11958 int align;
11959 int total_size;
11960 {
11961
11962 /* Expand expression and copy pointer */
11963 bc_expand_expr (exp);
11964 bc_emit_instruction (over);
11965
11966
11967 /* If the component is a bit field, we cannot use addressing to access
11968 it. Use bit-field techniques to store in it. */
11969
11970 if (DECL_BIT_FIELD (field))
11971 {
11972 bc_store_bit_field (bitpos, bitsize, unsignedp);
11973 return;
11974 }
11975 else
11976 /* Not bit field */
11977 {
11978 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11979
11980 /* Advance pointer to the desired member */
11981 if (offset)
11982 bc_emit_instruction (addconstPSI, offset);
11983
11984 /* Store */
11985 bc_store_memory (type, field);
11986 }
11987 }
11988
11989
11990 /* Store SI/SU in bitfield */
11991
11992 void
11993 bc_store_bit_field (offset, size, unsignedp)
11994 int offset, size, unsignedp;
11995 {
11996 /* Push bitfield offset and size */
11997 bc_push_offset_and_size (offset, size);
11998
11999 /* Store */
12000 bc_emit_instruction (sstoreBI);
12001 }
12002
12003
12004 /* Load SI/SU from bitfield */
12005
12006 void
12007 bc_load_bit_field (offset, size, unsignedp)
12008 int offset, size, unsignedp;
12009 {
12010 /* Push bitfield offset and size */
12011 bc_push_offset_and_size (offset, size);
12012
12013 /* Load: sign-extend if signed, else zero-extend */
12014 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
12015 }
12016
12017
12018 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
12019 (adjust stack pointer upwards), negative means add that number of
12020 levels (adjust the stack pointer downwards). Only positive values
12021 normally make sense. */
12022
12023 void
12024 bc_adjust_stack (nlevels)
12025 int nlevels;
12026 {
12027 switch (nlevels)
12028 {
12029 case 0:
12030 break;
12031
12032 case 2:
12033 bc_emit_instruction (drop);
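/* Falls through so that case 2 drops two levels in all.  */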
12034
12035 case 1:
12036 bc_emit_instruction (drop);
12037 break;
12038
12039 default:
12040
12041 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12042 stack_depth -= nlevels;
12043 }
12044
12045 #if defined (VALIDATE_STACK_FOR_BC)
12046 VALIDATE_STACK_FOR_BC ();
12047 #endif
12048 }