1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "tree.h"
24 #include "flags.h"
25 #include "function.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "expr.h"
29 #include "insn-config.h"
30 #include "recog.h"
31 #include "output.h"
32 #include "gvarargs.h"
33 #include "typeclass.h"
34
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
36
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
39
40 #ifdef STACK_GROWS_DOWNWARD
41 #ifdef PUSH_ROUNDING
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
43 #endif
44 #endif
45
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
49 #else
50 #define STACK_PUSH_CODE PRE_INC
51 #endif
52 #endif
53
54 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
56
57 /* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
63 int cse_not_expected;
64
65 /* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68 int do_preexpand_calls = 1;
69
70 /* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72 int pending_stack_adjust;
73
74 /* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78 int inhibit_defer_pop;
79
80 /* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82 tree cleanups_this_call;
83
84 /* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
86 returned. */
87 static rtx saveregs_value;
88
89 rtx store_expr ();
90 static void store_constructor ();
91 static rtx store_field ();
92 static rtx expand_builtin ();
93 static rtx compare ();
94 static rtx do_store_flag ();
95 static void preexpand_calls ();
96 static rtx expand_increment ();
97 static void init_queue ();
98
99 void do_pending_stack_adjust ();
100 static void do_jump_for_compare ();
101 static void do_jump_by_parts_equality ();
102 static void do_jump_by_parts_equality_rtx ();
103 static void do_jump_by_parts_greater ();
104
105 /* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
108
109 static char direct_load[NUM_MACHINE_MODES];
110 static char direct_store[NUM_MACHINE_MODES];
111
112 /* MOVE_RATIO is the number of move instructions that is better than
113 a block move. */
114
115 #ifndef MOVE_RATIO
116 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
117 #define MOVE_RATIO 2
118 #else
119 /* A value of around 6 would minimize code size; infinity would minimize
120 execution time. */
121 #define MOVE_RATIO 15
122 #endif
123 #endif
124
125 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
126
127 #ifndef SLOW_UNALIGNED_ACCESS
128 #define SLOW_UNALIGNED_ACCESS 0
129 #endif
130 \f
131 /* This is run once per compilation to set up which modes can be used
132 directly in memory. */
133
134 void
135 init_expr_once ()
136 {
137 rtx insn, pat;
138 enum machine_mode mode;
139 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
140
141 start_sequence ();
142	  insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
143 pat = PATTERN (insn);
144
145 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
146 mode = (enum machine_mode) ((int) mode + 1))
147 {
148 int regno;
149 rtx reg;
150 int num_clobbers;
151
152 direct_load[(int) mode] = direct_store[(int) mode] = 0;
153 PUT_MODE (mem, mode);
154
155 /* Find a register that can be used in this mode, if any. */
156 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
157 if (HARD_REGNO_MODE_OK (regno, mode))
158 break;
159
160 if (regno == FIRST_PSEUDO_REGISTER)
161 continue;
162
163 reg = gen_rtx (REG, mode, regno);
164
165 SET_SRC (pat) = mem;
166 SET_DEST (pat) = reg;
167 direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
168
169 SET_SRC (pat) = reg;
170 SET_DEST (pat) = mem;
171 direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
172 }
173
174 end_sequence ();
175 }
176
177 /* This is run at the start of compiling a function. */
178
179 void
180 init_expr ()
181 {
182 init_queue ();
183
184 pending_stack_adjust = 0;
185 inhibit_defer_pop = 0;
186 cleanups_this_call = 0;
187 saveregs_value = 0;
188 forced_labels = 0;
189 }
190
191 /* Save all variables describing the current status into the structure *P.
192 This is used before starting a nested function. */
193
194 void
195 save_expr_status (p)
196 struct function *p;
197 {
198 /* Instead of saving the postincrement queue, empty it. */
199 emit_queue ();
200
201 p->pending_stack_adjust = pending_stack_adjust;
202 p->inhibit_defer_pop = inhibit_defer_pop;
203 p->cleanups_this_call = cleanups_this_call;
204 p->saveregs_value = saveregs_value;
205 p->forced_labels = forced_labels;
206
207 pending_stack_adjust = 0;
208 inhibit_defer_pop = 0;
209 cleanups_this_call = 0;
210 saveregs_value = 0;
211 forced_labels = 0;
212 }
213
214 /* Restore all variables describing the current status from the structure *P.
215 This is used after a nested function. */
216
217 void
218 restore_expr_status (p)
219 struct function *p;
220 {
221 pending_stack_adjust = p->pending_stack_adjust;
222 inhibit_defer_pop = p->inhibit_defer_pop;
223 cleanups_this_call = p->cleanups_this_call;
224 saveregs_value = p->saveregs_value;
225 forced_labels = p->forced_labels;
226 }
227 \f
228 /* Manage the queue of increment instructions to be output
229 for POSTINCREMENT_EXPR expressions, etc. */
230
231 static rtx pending_chain;
232
233 /* Queue up to increment (or change) VAR later. BODY says how:
234 BODY should be the same thing you would pass to emit_insn
235 to increment right away. It will go to emit_insn later on.
236
237 The value is a QUEUED expression to be used in place of VAR
238 where you want to guarantee the pre-incrementation value of VAR. */
239
240 static rtx
241 enqueue_insn (var, body)
242 rtx var, body;
243 {
244 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
245 var, NULL_RTX, NULL_RTX, body, pending_chain);
246 return pending_chain;
247 }
248
249 /* Use protect_from_queue to convert a QUEUED expression
250 into something that you can put immediately into an instruction.
251 If the queued incrementation has not happened yet,
252 protect_from_queue returns the variable itself.
253 If the incrementation has happened, protect_from_queue returns a temp
254 that contains a copy of the old value of the variable.
255
256 Any time an rtx which might possibly be a QUEUED is to be put
257 into an instruction, it must be passed through protect_from_queue first.
258 QUEUED expressions are not meaningful in instructions.
259
260 Do not pass a value through protect_from_queue and then hold
261 on to it for a while before putting it in an instruction!
262 If the queue is flushed in between, incorrect code will result. */
263
264 rtx
265 protect_from_queue (x, modify)
266 register rtx x;
267 int modify;
268 {
269 register RTX_CODE code = GET_CODE (x);
270
271 #if 0 /* A QUEUED can hang around after the queue is forced out. */
272 /* Shortcut for most common case. */
273 if (pending_chain == 0)
274 return x;
275 #endif
276
277 if (code != QUEUED)
278 {
279 /* A special hack for read access to (MEM (QUEUED ...))
280 to facilitate use of autoincrement.
281 Make a copy of the contents of the memory location
282 rather than a copy of the address, but not
283 if the value is of mode BLKmode. */
284 if (code == MEM && GET_MODE (x) != BLKmode
285 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
286 {
287 register rtx y = XEXP (x, 0);
288 XEXP (x, 0) = QUEUED_VAR (y);
289 if (QUEUED_INSN (y))
290 {
291 register rtx temp = gen_reg_rtx (GET_MODE (x));
292 emit_insn_before (gen_move_insn (temp, x),
293 QUEUED_INSN (y));
294 return temp;
295 }
296 return x;
297 }
298 /* Otherwise, recursively protect the subexpressions of all
299 the kinds of rtx's that can contain a QUEUED. */
300 if (code == MEM)
301 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
302 else if (code == PLUS || code == MULT)
303 {
304 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
305 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
306 }
307 return x;
308 }
309 /* If the increment has not happened, use the variable itself. */
310 if (QUEUED_INSN (x) == 0)
311 return QUEUED_VAR (x);
312 /* If the increment has happened and a pre-increment copy exists,
313 use that copy. */
314 if (QUEUED_COPY (x) != 0)
315 return QUEUED_COPY (x);
316 /* The increment has happened but we haven't set up a pre-increment copy.
317 Set one up now, and use it. */
318 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
319 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
320 QUEUED_INSN (x));
321 return QUEUED_COPY (x);
322 }
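
/* Illustrative sketch (an assumed usage pattern, not a routine from this
   file): any rtx that might contain a QUEUED must go through
   protect_from_queue immediately before being placed in an insn, with
   MODIFY nonzero only for an operand that is written.  */
#if 0
static void
example_protect (dest, src)
     rtx dest, src;
{
  /* SRC may be a QUEUED left over from a postincrement; DEST is
     stored into, so pass MODIFY = 1 for it and 0 for the source.  */
  dest = protect_from_queue (dest, 1);
  src = protect_from_queue (src, 0);
  emit_move_insn (dest, src);
}
#endif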
323
324 /* Return nonzero if X contains a QUEUED expression:
325 if it contains anything that will be altered by a queued increment.
326 We handle only combinations of MEM, PLUS, MINUS and MULT operators
327 since memory addresses generally contain only those. */
328
329 static int
330 queued_subexp_p (x)
331 rtx x;
332 {
333 register enum rtx_code code = GET_CODE (x);
334 switch (code)
335 {
336 case QUEUED:
337 return 1;
338 case MEM:
339 return queued_subexp_p (XEXP (x, 0));
340 case MULT:
341 case PLUS:
342 case MINUS:
343 return queued_subexp_p (XEXP (x, 0))
344 || queued_subexp_p (XEXP (x, 1));
345 }
346 return 0;
347 }
348
349 /* Perform all the pending incrementations. */
350
351 void
352 emit_queue ()
353 {
354 register rtx p;
355 while (p = pending_chain)
356 {
357 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
358 pending_chain = QUEUED_NEXT (p);
359 }
360 }
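
/* Illustrative sketch (an assumed flow, loosely how expand_increment
   might use the queue for a POSTINCREMENT_EXPR): the increment is
   queued, the pre-increment value is obtained via protect_from_queue,
   and emit_queue flushes the pending increment at the end of the
   statement.  */
#if 0
static void
example_postincrement (var)
     rtx var;
{
  /* Queue "VAR = VAR + 1" but keep using the old value of VAR.  */
  rtx queued = enqueue_insn (var,
			     gen_move_insn (var, plus_constant (var, 1)));
  rtx old = protect_from_queue (queued, 0);
  /* ... use OLD in further insns ... */
  emit_queue ();		/* now the queued increment is emitted */
}
#endif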
361
362 static void
363 init_queue ()
364 {
365 if (pending_chain)
366 abort ();
367 }
368 \f
369 /* Copy data from FROM to TO, where the machine modes are not the same.
370 Both modes may be integer, or both may be floating.
371 UNSIGNEDP should be nonzero if FROM is an unsigned type.
372 This causes zero-extension instead of sign-extension. */
373
374 void
375 convert_move (to, from, unsignedp)
376 register rtx to, from;
377 int unsignedp;
378 {
379 enum machine_mode to_mode = GET_MODE (to);
380 enum machine_mode from_mode = GET_MODE (from);
381 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
382 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
383 enum insn_code code;
384 rtx libcall;
385
386 /* rtx code for making an equivalent value. */
387 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
388
389 to = protect_from_queue (to, 1);
390 from = protect_from_queue (from, 0);
391
392 if (to_real != from_real)
393 abort ();
394
395 if (to_mode == from_mode
396 || (from_mode == VOIDmode && CONSTANT_P (from)))
397 {
398 emit_move_insn (to, from);
399 return;
400 }
401
402 if (to_real)
403 {
404 #ifdef HAVE_extendsfdf2
405 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
406 {
407 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
408 return;
409 }
410 #endif
411 #ifdef HAVE_extendsfxf2
412 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
413 {
414 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
415 return;
416 }
417 #endif
418 #ifdef HAVE_extendsftf2
419 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
420 {
421 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
422 return;
423 }
424 #endif
425 #ifdef HAVE_extenddfxf2
426 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
427 {
428 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
429 return;
430 }
431 #endif
432 #ifdef HAVE_extenddftf2
433 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
434 {
435 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
436 return;
437 }
438 #endif
439 #ifdef HAVE_truncdfsf2
440 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
441 {
442 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
443 return;
444 }
445 #endif
446 #ifdef HAVE_truncxfsf2
447 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
448 {
449 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
450 return;
451 }
452 #endif
453 #ifdef HAVE_trunctfsf2
454 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
455 {
456 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
457 return;
458 }
459 #endif
460 #ifdef HAVE_truncxfdf2
461 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
462 {
463 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
464 return;
465 }
466 #endif
467 #ifdef HAVE_trunctfdf2
468 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
469 {
470 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
471 return;
472 }
473 #endif
474
475 libcall = (rtx) 0;
476 switch (from_mode)
477 {
478 case SFmode:
479 switch (to_mode)
480 {
481 case DFmode:
482 libcall = extendsfdf2_libfunc;
483 break;
484
485 case XFmode:
486 libcall = extendsfxf2_libfunc;
487 break;
488
489 case TFmode:
490 libcall = extendsftf2_libfunc;
491 break;
492 }
493 break;
494
495 case DFmode:
496 switch (to_mode)
497 {
498 case SFmode:
499 libcall = truncdfsf2_libfunc;
500 break;
501
502 case XFmode:
503 libcall = extenddfxf2_libfunc;
504 break;
505
506 case TFmode:
507 libcall = extenddftf2_libfunc;
508 break;
509 }
510 break;
511
512 case XFmode:
513 switch (to_mode)
514 {
515 case SFmode:
516 libcall = truncxfsf2_libfunc;
517 break;
518
519 case DFmode:
520 libcall = truncxfdf2_libfunc;
521 break;
522 }
523 break;
524
525 case TFmode:
526 switch (to_mode)
527 {
528 case SFmode:
529 libcall = trunctfsf2_libfunc;
530 break;
531
532 case DFmode:
533 libcall = trunctfdf2_libfunc;
534 break;
535 }
536 break;
537 }
538
539 if (libcall == (rtx) 0)
540 /* This conversion is not implemented yet. */
541 abort ();
542
543 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
544 emit_move_insn (to, hard_libcall_value (to_mode));
545 return;
546 }
547
548 /* Now both modes are integers. */
549
550 /* Handle expanding beyond a word. */
551 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
552 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
553 {
554 rtx insns;
555 rtx lowpart;
556 rtx fill_value;
557 rtx lowfrom;
558 int i;
559 enum machine_mode lowpart_mode;
560 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
561
562 /* Try converting directly if the insn is supported. */
563 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
564 != CODE_FOR_nothing)
565 {
566 emit_unop_insn (code, to, from, equiv_code);
567 return;
568 }
569 /* Next, try converting via full word. */
570 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
571 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
572 != CODE_FOR_nothing))
573 {
574 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
575 emit_unop_insn (code, to,
576 gen_lowpart (word_mode, to), equiv_code);
577 return;
578 }
579
580 /* No special multiword conversion insn; do it by hand. */
581 start_sequence ();
582
583 /* Get a copy of FROM widened to a word, if necessary. */
584 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
585 lowpart_mode = word_mode;
586 else
587 lowpart_mode = from_mode;
588
589 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
590
591 lowpart = gen_lowpart (lowpart_mode, to);
592 emit_move_insn (lowpart, lowfrom);
593
594 /* Compute the value to put in each remaining word. */
595 if (unsignedp)
596 fill_value = const0_rtx;
597 else
598 {
599 #ifdef HAVE_slt
600 if (HAVE_slt
601 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
602 && STORE_FLAG_VALUE == -1)
603 {
604 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
605 lowpart_mode, 0, 0);
606 fill_value = gen_reg_rtx (word_mode);
607 emit_insn (gen_slt (fill_value));
608 }
609 else
610 #endif
611 {
612 fill_value
613 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
614 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
615 NULL_RTX, 0);
616 fill_value = convert_to_mode (word_mode, fill_value, 1);
617 }
618 }
619
620 /* Fill the remaining words. */
621 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
622 {
623 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
624 rtx subword = operand_subword (to, index, 1, to_mode);
625
626 if (subword == 0)
627 abort ();
628
629 if (fill_value != subword)
630 emit_move_insn (subword, fill_value);
631 }
632
633 insns = get_insns ();
634 end_sequence ();
635
636 emit_no_conflict_block (insns, to, from, NULL_RTX,
637 gen_rtx (equiv_code, to_mode, from));
638 return;
639 }
640
641 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
642 {
643 convert_move (to, gen_lowpart (word_mode, from), 0);
644 return;
645 }
646
647 /* Handle pointer conversion */ /* SPEE 900220 */
648 if (to_mode == PSImode)
649 {
650 if (from_mode != SImode)
651 from = convert_to_mode (SImode, from, unsignedp);
652
653 #ifdef HAVE_truncsipsi
654 if (HAVE_truncsipsi)
655 {
656 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
657 return;
658 }
659 #endif /* HAVE_truncsipsi */
660 abort ();
661 }
662
663 if (from_mode == PSImode)
664 {
665 if (to_mode != SImode)
666 {
667 from = convert_to_mode (SImode, from, unsignedp);
668 from_mode = SImode;
669 }
670 else
671 {
672 #ifdef HAVE_extendpsisi
673 if (HAVE_extendpsisi)
674 {
675 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
676 return;
677 }
678 #endif /* HAVE_extendpsisi */
679 abort ();
680 }
681 }
682
683 /* Now follow all the conversions between integers
684 no more than a word long. */
685
686 /* For truncation, usually we can just refer to FROM in a narrower mode. */
687 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
688 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
689 GET_MODE_BITSIZE (from_mode))
690 && ((GET_CODE (from) == MEM
691 && ! MEM_VOLATILE_P (from)
692 && direct_load[(int) to_mode]
693 && ! mode_dependent_address_p (XEXP (from, 0)))
694 || GET_CODE (from) == REG
695 || GET_CODE (from) == SUBREG))
696 {
697 emit_move_insn (to, gen_lowpart (to_mode, from));
698 return;
699 }
700
701	  /* Now handle extension to a wider mode.  */
702 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
703 {
704 /* Convert directly if that works. */
705 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
706 != CODE_FOR_nothing)
707 {
708 /* If FROM is a SUBREG, put it into a register. Do this
709 so that we always generate the same set of insns for
710 better cse'ing; if an intermediate assignment occurred,
711 we won't be doing the operation directly on the SUBREG. */
712 if (optimize > 0 && GET_CODE (from) == SUBREG)
713 from = force_reg (from_mode, from);
714 emit_unop_insn (code, to, from, equiv_code);
715 return;
716 }
717 else
718 {
719 enum machine_mode intermediate;
720
721 /* Search for a mode to convert via. */
722 for (intermediate = from_mode; intermediate != VOIDmode;
723 intermediate = GET_MODE_WIDER_MODE (intermediate))
724 if ((can_extend_p (to_mode, intermediate, unsignedp)
725 != CODE_FOR_nothing)
726 && (can_extend_p (intermediate, from_mode, unsignedp)
727 != CODE_FOR_nothing))
728 {
729 convert_move (to, convert_to_mode (intermediate, from,
730 unsignedp), unsignedp);
731 return;
732 }
733
734 /* No suitable intermediate mode. */
735 abort ();
736 }
737 }
738
739 /* Support special truncate insns for certain modes. */
740
741 if (from_mode == DImode && to_mode == SImode)
742 {
743 #ifdef HAVE_truncdisi2
744 if (HAVE_truncdisi2)
745 {
746 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
747 return;
748 }
749 #endif
750 convert_move (to, force_reg (from_mode, from), unsignedp);
751 return;
752 }
753
754 if (from_mode == DImode && to_mode == HImode)
755 {
756 #ifdef HAVE_truncdihi2
757 if (HAVE_truncdihi2)
758 {
759 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
760 return;
761 }
762 #endif
763 convert_move (to, force_reg (from_mode, from), unsignedp);
764 return;
765 }
766
767 if (from_mode == DImode && to_mode == QImode)
768 {
769 #ifdef HAVE_truncdiqi2
770 if (HAVE_truncdiqi2)
771 {
772 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
773 return;
774 }
775 #endif
776 convert_move (to, force_reg (from_mode, from), unsignedp);
777 return;
778 }
779
780 if (from_mode == SImode && to_mode == HImode)
781 {
782 #ifdef HAVE_truncsihi2
783 if (HAVE_truncsihi2)
784 {
785 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
786 return;
787 }
788 #endif
789 convert_move (to, force_reg (from_mode, from), unsignedp);
790 return;
791 }
792
793 if (from_mode == SImode && to_mode == QImode)
794 {
795 #ifdef HAVE_truncsiqi2
796 if (HAVE_truncsiqi2)
797 {
798 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
799 return;
800 }
801 #endif
802 convert_move (to, force_reg (from_mode, from), unsignedp);
803 return;
804 }
805
806 if (from_mode == HImode && to_mode == QImode)
807 {
808 #ifdef HAVE_trunchiqi2
809 if (HAVE_trunchiqi2)
810 {
811 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
812 return;
813 }
814 #endif
815 convert_move (to, force_reg (from_mode, from), unsignedp);
816 return;
817 }
818
819 /* Handle truncation of volatile memrefs, and so on;
820 the things that couldn't be truncated directly,
821 and for which there was no special instruction. */
822 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
823 {
824 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
825 emit_move_insn (to, temp);
826 return;
827 }
828
829 /* Mode combination is not recognized. */
830 abort ();
831 }
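
/* Illustrative sketch (assumed usage): widening an SImode pseudo into a
   DImode pseudo.  UNSIGNEDP selects zero- versus sign-extension.  */
#if 0
static rtx
example_widen (from, unsignedp)
     rtx from;			/* an SImode value */
     int unsignedp;
{
  rtx to = gen_reg_rtx (DImode);
  convert_move (to, from, unsignedp);
  return to;
}
#endif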
832
833 /* Return an rtx for a value that would result
834 from converting X to mode MODE.
835 Both X and MODE may be floating, or both integer.
836 UNSIGNEDP is nonzero if X is an unsigned value.
837 This can be done by referring to a part of X in place
838 or by copying to a new temporary with conversion.
839
840 This function *must not* call protect_from_queue
841 except when putting X into an insn (in which case convert_move does it). */
842
843 rtx
844 convert_to_mode (mode, x, unsignedp)
845 enum machine_mode mode;
846 rtx x;
847 int unsignedp;
848 {
849 register rtx temp;
850
851 if (mode == GET_MODE (x))
852 return x;
853
854 /* There is one case that we must handle specially: If we are converting
855 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
856 we are to interpret the constant as unsigned, gen_lowpart will do
857	     the wrong thing if the constant appears negative.  What we want to do is
858 make the high-order word of the constant zero, not all ones. */
859
860 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
861 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
862 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
863 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
864
865 /* We can do this with a gen_lowpart if both desired and current modes
866 are integer, and this is either a constant integer, a register, or a
867 non-volatile MEM. Except for the constant case, we must be narrowing
868 the operand. */
869
870 if (GET_CODE (x) == CONST_INT
871 || (GET_MODE_CLASS (mode) == MODE_INT
872 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
873 && (GET_CODE (x) == CONST_DOUBLE
874 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
875 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
876 && direct_load[(int) mode]
877 || GET_CODE (x) == REG)))))
878 return gen_lowpart (mode, x);
879
880 temp = gen_reg_rtx (mode);
881 convert_move (temp, x, unsignedp);
882 return temp;
883 }
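
/* Illustrative sketch (assumed usage): unlike convert_move, the caller
   supplies only the goal mode; the result may be X itself, a lowpart
   reference into X, or a fresh pseudo holding the converted value.  */
#if 0
static rtx
example_narrow (x)
     rtx x;			/* a DImode value */
{
  /* Refer to the low SImode part of X, copying only if necessary.  */
  return convert_to_mode (SImode, x, 1);
}
#endif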
884 \f
885 /* Generate several move instructions to copy LEN bytes
886 from block FROM to block TO. (These are MEM rtx's with BLKmode).
887 The caller must pass FROM and TO
888 through protect_from_queue before calling.
889 ALIGN (in bytes) is maximum alignment we can assume. */
890
891 struct move_by_pieces
892 {
893 rtx to;
894 rtx to_addr;
895 int autinc_to;
896 int explicit_inc_to;
897 rtx from;
898 rtx from_addr;
899 int autinc_from;
900 int explicit_inc_from;
901 int len;
902 int offset;
903 int reverse;
904 };
905
906 static void move_by_pieces_1 ();
907 static int move_by_pieces_ninsns ();
908
909 static void
910 move_by_pieces (to, from, len, align)
911 rtx to, from;
912 int len, align;
913 {
914 struct move_by_pieces data;
915 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
916 int max_size = MOVE_MAX + 1;
917
918 data.offset = 0;
919 data.to_addr = to_addr;
920 data.from_addr = from_addr;
921 data.to = to;
922 data.from = from;
923 data.autinc_to
924 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
925 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
926 data.autinc_from
927 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
928 || GET_CODE (from_addr) == POST_INC
929 || GET_CODE (from_addr) == POST_DEC);
930
931 data.explicit_inc_from = 0;
932 data.explicit_inc_to = 0;
933 data.reverse
934 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
935 if (data.reverse) data.offset = len;
936 data.len = len;
937
938 /* If copying requires more than two move insns,
939 copy addresses to registers (to make displacements shorter)
940 and use post-increment if available. */
941 if (!(data.autinc_from && data.autinc_to)
942 && move_by_pieces_ninsns (len, align) > 2)
943 {
944 #ifdef HAVE_PRE_DECREMENT
945 if (data.reverse && ! data.autinc_from)
946 {
947 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
948 data.autinc_from = 1;
949 data.explicit_inc_from = -1;
950 }
951 #endif
952 #ifdef HAVE_POST_INCREMENT
953 if (! data.autinc_from)
954 {
955 data.from_addr = copy_addr_to_reg (from_addr);
956 data.autinc_from = 1;
957 data.explicit_inc_from = 1;
958 }
959 #endif
960 if (!data.autinc_from && CONSTANT_P (from_addr))
961 data.from_addr = copy_addr_to_reg (from_addr);
962 #ifdef HAVE_PRE_DECREMENT
963 if (data.reverse && ! data.autinc_to)
964 {
965 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
966 data.autinc_to = 1;
967 data.explicit_inc_to = -1;
968 }
969 #endif
970 #ifdef HAVE_POST_INCREMENT
971 if (! data.reverse && ! data.autinc_to)
972 {
973 data.to_addr = copy_addr_to_reg (to_addr);
974 data.autinc_to = 1;
975 data.explicit_inc_to = 1;
976 }
977 #endif
978 if (!data.autinc_to && CONSTANT_P (to_addr))
979 data.to_addr = copy_addr_to_reg (to_addr);
980 }
981
982 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
983 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
984 align = MOVE_MAX;
985
986 /* First move what we can in the largest integer mode, then go to
987 successively smaller modes. */
988
989 while (max_size > 1)
990 {
991 enum machine_mode mode = VOIDmode, tmode;
992 enum insn_code icode;
993
994 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
995 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
996 if (GET_MODE_SIZE (tmode) < max_size)
997 mode = tmode;
998
999 if (mode == VOIDmode)
1000 break;
1001
1002 icode = mov_optab->handlers[(int) mode].insn_code;
1003 if (icode != CODE_FOR_nothing
1004 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1005 GET_MODE_SIZE (mode)))
1006 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1007
1008 max_size = GET_MODE_SIZE (mode);
1009 }
1010
1011 /* The code above should have handled everything. */
1012 if (data.len != 0)
1013 abort ();
1014 }
1015
1016 /* Return number of insns required to move L bytes by pieces.
1017 ALIGN (in bytes) is maximum alignment we can assume. */
1018
1019 static int
1020 move_by_pieces_ninsns (l, align)
1021 unsigned int l;
1022 int align;
1023 {
1024 register int n_insns = 0;
1025 int max_size = MOVE_MAX + 1;
1026
1027 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1028 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1029 align = MOVE_MAX;
1030
1031 while (max_size > 1)
1032 {
1033 enum machine_mode mode = VOIDmode, tmode;
1034 enum insn_code icode;
1035
1036 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1037 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1038 if (GET_MODE_SIZE (tmode) < max_size)
1039 mode = tmode;
1040
1041 if (mode == VOIDmode)
1042 break;
1043
1044 icode = mov_optab->handlers[(int) mode].insn_code;
1045 if (icode != CODE_FOR_nothing
1046 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1047 GET_MODE_SIZE (mode)))
1048 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1049
1050 max_size = GET_MODE_SIZE (mode);
1051 }
1052
1053 return n_insns;
1054 }
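
/* Illustrative sketch (assumed decision logic, mirroring emit_block_move
   below): a constant-size copy is expanded inline only when it takes
   fewer than MOVE_RATIO move insns at the known alignment.  */
#if 0
static int
example_use_inline_copy (size, align)
     rtx size;
     int align;			/* in bytes */
{
  return (GET_CODE (size) == CONST_INT
	  && move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO);
}
#endif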
1055
1056 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1057 with move instructions for mode MODE. GENFUN is the gen_... function
1058 to make a move insn for that mode. DATA has all the other info. */
1059
1060 static void
1061 move_by_pieces_1 (genfun, mode, data)
1062 rtx (*genfun) ();
1063 enum machine_mode mode;
1064 struct move_by_pieces *data;
1065 {
1066 register int size = GET_MODE_SIZE (mode);
1067 register rtx to1, from1;
1068
1069 while (data->len >= size)
1070 {
1071 if (data->reverse) data->offset -= size;
1072
1073 to1 = (data->autinc_to
1074 ? gen_rtx (MEM, mode, data->to_addr)
1075 : change_address (data->to, mode,
1076 plus_constant (data->to_addr, data->offset)));
1077 from1 =
1078 (data->autinc_from
1079 ? gen_rtx (MEM, mode, data->from_addr)
1080 : change_address (data->from, mode,
1081 plus_constant (data->from_addr, data->offset)));
1082
1083 #ifdef HAVE_PRE_DECREMENT
1084 if (data->explicit_inc_to < 0)
1085 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1086 if (data->explicit_inc_from < 0)
1087 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1088 #endif
1089
1090 emit_insn ((*genfun) (to1, from1));
1091 #ifdef HAVE_POST_INCREMENT
1092 if (data->explicit_inc_to > 0)
1093 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1094 if (data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1096 #endif
1097
1098 if (! data->reverse) data->offset += size;
1099
1100 data->len -= size;
1101 }
1102 }
1103 \f
1104 /* Emit code to move a block Y to a block X.
1105 This may be done with string-move instructions,
1106 with multiple scalar move instructions, or with a library call.
1107
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1109 with mode BLKmode.
1110 SIZE is an rtx that says how long they are.
1111 ALIGN is the maximum alignment we can assume they have,
1112 measured in bytes. */
1113
1114 void
1115 emit_block_move (x, y, size, align)
1116 rtx x, y;
1117 rtx size;
1118 int align;
1119 {
1120 if (GET_MODE (x) != BLKmode)
1121 abort ();
1122
1123 if (GET_MODE (y) != BLKmode)
1124 abort ();
1125
1126 x = protect_from_queue (x, 1);
1127 y = protect_from_queue (y, 0);
1128 size = protect_from_queue (size, 0);
1129
1130 if (GET_CODE (x) != MEM)
1131 abort ();
1132 if (GET_CODE (y) != MEM)
1133 abort ();
1134 if (size == 0)
1135 abort ();
1136
1137 if (GET_CODE (size) == CONST_INT
1138 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1139 move_by_pieces (x, y, INTVAL (size), align);
1140 else
1141 {
1142 /* Try the most limited insn first, because there's no point
1143 including more than one in the machine description unless
1144 the more limited one has some advantage. */
1145 #ifdef HAVE_movstrqi
1146 if (HAVE_movstrqi
1147 && GET_CODE (size) == CONST_INT
1148 && ((unsigned) INTVAL (size)
1149 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1150 {
1151 rtx insn = gen_movstrqi (x, y, size, GEN_INT (align));
1152 if (insn)
1153 {
1154 emit_insn (insn);
1155 return;
1156 }
1157 }
1158 #endif
1159 #ifdef HAVE_movstrhi
1160 if (HAVE_movstrhi
1161 && GET_CODE (size) == CONST_INT
1162 && ((unsigned) INTVAL (size)
1163 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1164 {
1165 rtx insn = gen_movstrhi (x, y, size, GEN_INT (align));
1166 if (insn)
1167 {
1168 emit_insn (insn);
1169 return;
1170 }
1171 }
1172 #endif
1173 #ifdef HAVE_movstrsi
1174 if (HAVE_movstrsi)
1175 {
1176 rtx insn = gen_movstrsi (x, y, size, GEN_INT (align));
1177 if (insn)
1178 {
1179 emit_insn (insn);
1180 return;
1181 }
1182 }
1183 #endif
1184 #ifdef HAVE_movstrdi
1185 if (HAVE_movstrdi)
1186 {
1187 rtx insn = gen_movstrdi (x, y, size, GEN_INT (align));
1188 if (insn)
1189 {
1190 emit_insn (insn);
1191 return;
1192 }
1193 }
1194 #endif
1195
1196 #ifdef TARGET_MEM_FUNCTIONS
1197 emit_library_call (memcpy_libfunc, 1,
1198 VOIDmode, 3, XEXP (x, 0), Pmode,
1199 XEXP (y, 0), Pmode,
1200 convert_to_mode (Pmode, size, 1), Pmode);
1201 #else
1202 emit_library_call (bcopy_libfunc, 1,
1203 VOIDmode, 3, XEXP (y, 0), Pmode,
1204 XEXP (x, 0), Pmode,
1205 convert_to_mode (Pmode, size, 1), Pmode);
1206 #endif
1207 }
1208 }
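
/* Illustrative sketch (assumed usage): copying a 32-byte block.  Both
   operands must be BLKmode MEMs, and ALIGN is given in bytes; here we
   assume word alignment.  */
#if 0
static void
example_block_copy (dst_addr, src_addr)
     rtx dst_addr, src_addr;	/* Pmode addresses */
{
  rtx dst = gen_rtx (MEM, BLKmode, memory_address (BLKmode, dst_addr));
  rtx src = gen_rtx (MEM, BLKmode, memory_address (BLKmode, src_addr));
  emit_block_move (dst, src, GEN_INT (32), UNITS_PER_WORD);
}
#endif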
1209 \f
1210 /* Copy all or part of a value X into registers starting at REGNO.
1211 The number of registers to be filled is NREGS. */
1212
1213 void
1214 move_block_to_reg (regno, x, nregs, mode)
1215 int regno;
1216 rtx x;
1217 int nregs;
1218 enum machine_mode mode;
1219 {
1220 int i;
1221 rtx pat, last;
1222
1223 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1224 x = validize_mem (force_const_mem (mode, x));
1225
1226 /* See if the machine can do this with a load multiple insn. */
1227 #ifdef HAVE_load_multiple
1228 last = get_last_insn ();
1229 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1230 GEN_INT (nregs));
1231 if (pat)
1232 {
1233 emit_insn (pat);
1234 return;
1235 }
1236 else
1237 delete_insns_since (last);
1238 #endif
1239
1240 for (i = 0; i < nregs; i++)
1241 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1242 operand_subword_force (x, i, mode));
1243 }
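
/* Illustrative sketch (assumed usage): loading a two-word value into
   consecutive hard registers starting at a hypothetical register 4,
   as a caller passing an argument in registers might do.  */
#if 0
static void
example_load_arg_regs (x, mode)
     rtx x;
     enum machine_mode mode;
{
  move_block_to_reg (4, x, 2, mode);	/* fills regs 4 and 5 */
}
#endif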
1244
1245 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1246 The number of registers to be filled is NREGS. */
1247
1248 void
1249 move_block_from_reg (regno, x, nregs)
1250 int regno;
1251 rtx x;
1252 int nregs;
1253 {
1254 int i;
1255 rtx pat, last;
1256
1257 /* See if the machine can do this with a store multiple insn. */
1258 #ifdef HAVE_store_multiple
1259 last = get_last_insn ();
1260 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1261 GEN_INT (nregs));
1262 if (pat)
1263 {
1264 emit_insn (pat);
1265 return;
1266 }
1267 else
1268 delete_insns_since (last);
1269 #endif
1270
1271 for (i = 0; i < nregs; i++)
1272 {
1273 rtx tem = operand_subword (x, i, 1, BLKmode);
1274
1275 if (tem == 0)
1276 abort ();
1277
1278 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1279 }
1280 }
1281
1282 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1283
1284 void
1285 use_regs (regno, nregs)
1286 int regno;
1287 int nregs;
1288 {
1289 int i;
1290
1291 for (i = 0; i < nregs; i++)
1292 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1293 }
1294 \f
1295 /* Write zeros through the storage of OBJECT.
1296 If OBJECT has BLKmode, SIZE is its length in bytes. */
1297
1298 void
1299 clear_storage (object, size)
1300 rtx object;
1301 int size;
1302 {
1303 if (GET_MODE (object) == BLKmode)
1304 {
1305 #ifdef TARGET_MEM_FUNCTIONS
1306 emit_library_call (memset_libfunc, 1,
1307 VOIDmode, 3,
1308 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1309 GEN_INT (size), Pmode);
1310 #else
1311 emit_library_call (bzero_libfunc, 1,
1312 VOIDmode, 2,
1313 XEXP (object, 0), Pmode,
1314 GEN_INT (size), Pmode);
1315 #endif
1316 }
1317 else
1318 emit_move_insn (object, const0_rtx);
1319 }
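
/* Illustrative sketch (assumed usage): zeroing a 16-byte BLKmode stack
   temporary.  A non-BLKmode object would instead get a single move of
   const0_rtx.  */
#if 0
static void
example_clear_temp ()
{
  rtx temp = assign_stack_temp (BLKmode, 16, 0);
  clear_storage (temp, 16);
}
#endif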
1320
1321 /* Generate code to copy Y into X.
1322 Both Y and X must have the same mode, except that
1323 Y can be a constant with VOIDmode.
1324 This mode cannot be BLKmode; use emit_block_move for that.
1325
1326 Return the last instruction emitted. */
1327
1328 rtx
1329 emit_move_insn (x, y)
1330 rtx x, y;
1331 {
1332 enum machine_mode mode = GET_MODE (x);
1333 int i;
1334
1335 x = protect_from_queue (x, 1);
1336 y = protect_from_queue (y, 0);
1337
1338 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1339 abort ();
1340
1341 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1342 y = force_const_mem (mode, y);
1343
1344 /* If X or Y are memory references, verify that their addresses are valid
1345 for the machine. */
1346 if (GET_CODE (x) == MEM
1347 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1348 && ! push_operand (x, GET_MODE (x)))
1349 || (flag_force_addr
1350 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1351 x = change_address (x, VOIDmode, XEXP (x, 0));
1352
1353 if (GET_CODE (y) == MEM
1354 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1355 || (flag_force_addr
1356 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1357 y = change_address (y, VOIDmode, XEXP (y, 0));
1358
1359 if (mode == BLKmode)
1360 abort ();
1361
1362 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1363 return
1364 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1365
1366 /* This will handle any multi-word mode that lacks a move_insn pattern.
1367 However, you will get better code if you define such patterns,
1368 even if they must turn into multiple assembler instructions. */
1369 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1370 {
1371 rtx last_insn = 0;
1372
1373 for (i = 0;
1374 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1375 i++)
1376 {
1377 rtx xpart = operand_subword (x, i, 1, mode);
1378 rtx ypart = operand_subword (y, i, 1, mode);
1379
1380 /* If we can't get a part of Y, put Y into memory if it is a
1381 constant. Otherwise, force it into a register. If we still
1382 can't get a part of Y, abort. */
1383 if (ypart == 0 && CONSTANT_P (y))
1384 {
1385 y = force_const_mem (mode, y);
1386 ypart = operand_subword (y, i, 1, mode);
1387 }
1388 else if (ypart == 0)
1389 ypart = operand_subword_force (y, i, mode);
1390
1391 if (xpart == 0 || ypart == 0)
1392 abort ();
1393
1394 last_insn = emit_move_insn (xpart, ypart);
1395 }
1396 return last_insn;
1397 }
1398 else
1399 abort ();
1400 }
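
/* Illustrative sketch (assumed usage): moving a constant into a fresh
   pseudo.  A VOIDmode constant is a legal source; BLKmode is not and
   must go through emit_block_move.  */
#if 0
static rtx
example_load_constant ()
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif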
1401 \f
1402 /* Pushing data onto the stack. */
1403
1404 /* Push a block of length SIZE (perhaps variable)
1405 and return an rtx to address the beginning of the block.
1406 Note that it is not possible for the value returned to be a QUEUED.
1407 The value may be virtual_outgoing_args_rtx.
1408
1409 EXTRA is the number of bytes of padding to push in addition to SIZE.
1410 BELOW nonzero means this padding comes at low addresses;
1411 otherwise, the padding comes at high addresses. */
1412
1413 rtx
1414 push_block (size, extra, below)
1415 rtx size;
1416 int extra, below;
1417 {
1418 register rtx temp;
1419 if (CONSTANT_P (size))
1420 anti_adjust_stack (plus_constant (size, extra));
1421 else if (GET_CODE (size) == REG && extra == 0)
1422 anti_adjust_stack (size);
1423 else
1424 {
1425 rtx temp = copy_to_mode_reg (Pmode, size);
1426 if (extra != 0)
1427 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1428 temp, 0, OPTAB_LIB_WIDEN);
1429 anti_adjust_stack (temp);
1430 }
1431
1432 #ifdef STACK_GROWS_DOWNWARD
1433 temp = virtual_outgoing_args_rtx;
1434 if (extra != 0 && below)
1435 temp = plus_constant (temp, extra);
1436 #else
1437 if (GET_CODE (size) == CONST_INT)
1438 temp = plus_constant (virtual_outgoing_args_rtx,
1439 - INTVAL (size) - (below ? 0 : extra));
1440 else if (extra != 0 && !below)
1441 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1442 negate_rtx (Pmode, plus_constant (size, extra)));
1443 else
1444 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1445 negate_rtx (Pmode, size));
1446 #endif
1447
1448 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1449 }
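
/* Illustrative sketch (assumed usage): reserving 64 bytes of stack plus
   8 bytes of padding at low addresses, and getting back an address for
   the beginning of the block.  */
#if 0
static rtx
example_push_block ()
{
  return push_block (GEN_INT (64), 8, 1);
}
#endif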
1450
1451 static rtx
1452 gen_push_operand ()
1453 {
1454 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1455 }
1456
1457 /* Generate code to push X onto the stack, assuming it has mode MODE and
1458 type TYPE.
1459 MODE is redundant except when X is a CONST_INT (since they don't
1460 carry mode info).
1461 SIZE is an rtx for the size of data to be copied (in bytes),
1462 needed only if X is BLKmode.
1463
1464 ALIGN (in bytes) is maximum alignment we can assume.
1465
1466 If PARTIAL is nonzero, then copy that many of the first words
1467 of X into registers starting with REG, and push the rest of X.
1468 The amount of space pushed is decreased by PARTIAL words,
1469 rounded *down* to a multiple of PARM_BOUNDARY.
1470 REG must be a hard register in this case.
1471
1472 EXTRA is the amount in bytes of extra space to leave next to this arg.
1473 This is ignored if an argument block has already been allocated.
1474
1475 On a machine that lacks real push insns, ARGS_ADDR is the address of
1476 the bottom of the argument block for this call. We use indexing off there
1477	   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
1478 argument block has not been preallocated.
1479
1480 ARGS_SO_FAR is the size of args previously pushed for this call. */
1481
1482 void
1483 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1484 args_addr, args_so_far)
1485 register rtx x;
1486 enum machine_mode mode;
1487 tree type;
1488 rtx size;
1489 int align;
1490 int partial;
1491 rtx reg;
1492 int extra;
1493 rtx args_addr;
1494 rtx args_so_far;
1495 {
1496 rtx xinner;
1497 enum direction stack_direction
1498 #ifdef STACK_GROWS_DOWNWARD
1499 = downward;
1500 #else
1501 = upward;
1502 #endif
1503
1504 /* Decide where to pad the argument: `downward' for below,
1505 `upward' for above, or `none' for don't pad it.
1506 Default is below for small data on big-endian machines; else above. */
1507 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1508
1509 /* Invert direction if stack is post-update. */
1510 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1511 if (where_pad != none)
1512 where_pad = (where_pad == downward ? upward : downward);
1513
1514 xinner = x = protect_from_queue (x, 0);
1515
1516 if (mode == BLKmode)
1517 {
1518 /* Copy a block into the stack, entirely or partially. */
1519
1520 register rtx temp;
1521 int used = partial * UNITS_PER_WORD;
1522 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1523 int skip;
1524
1525 if (size == 0)
1526 abort ();
1527
1528 used -= offset;
1529
1530 /* USED is now the # of bytes we need not copy to the stack
1531 because registers will take care of them. */
1532
1533 if (partial != 0)
1534 xinner = change_address (xinner, BLKmode,
1535 plus_constant (XEXP (xinner, 0), used));
1536
1537 /* If the partial register-part of the arg counts in its stack size,
1538 skip the part of stack space corresponding to the registers.
1539 Otherwise, start copying to the beginning of the stack space,
1540 by setting SKIP to 0. */
1541 #ifndef REG_PARM_STACK_SPACE
1542 skip = 0;
1543 #else
1544 skip = used;
1545 #endif
1546
1547 #ifdef PUSH_ROUNDING
1548 /* Do it with several push insns if that doesn't take lots of insns
1549 and if there is no difficulty with push insns that skip bytes
1550 on the stack for alignment purposes. */
1551 if (args_addr == 0
1552 && GET_CODE (size) == CONST_INT
1553 && skip == 0
1554 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1555 < MOVE_RATIO)
1556 /* Here we avoid the case of a structure whose weak alignment
1557 forces many pushes of a small amount of data,
1558 and such small pushes do rounding that causes trouble. */
1559 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1560 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1561 || PUSH_ROUNDING (align) == align)
1562 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1563 {
1564 /* Push padding now if padding above and stack grows down,
1565 or if padding below and stack grows up.
1566 But if space already allocated, this has already been done. */
1567 if (extra && args_addr == 0
1568 && where_pad != none && where_pad != stack_direction)
1569 anti_adjust_stack (GEN_INT (extra));
1570
1571 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1572 INTVAL (size) - used, align);
1573 }
1574 else
1575 #endif /* PUSH_ROUNDING */
1576 {
1577 /* Otherwise make space on the stack and copy the data
1578 to the address of that space. */
1579
1580 /* Deduct words put into registers from the size we must copy. */
1581 if (partial != 0)
1582 {
1583 if (GET_CODE (size) == CONST_INT)
1584 size = GEN_INT (INTVAL (size) - used);
1585 else
1586 size = expand_binop (GET_MODE (size), sub_optab, size,
1587 GEN_INT (used), NULL_RTX, 0,
1588 OPTAB_LIB_WIDEN);
1589 }
1590
1591 /* Get the address of the stack space.
1592 In this case, we do not deal with EXTRA separately.
1593 A single stack adjust will do. */
1594 if (! args_addr)
1595 {
1596 temp = push_block (size, extra, where_pad == downward);
1597 extra = 0;
1598 }
1599 else if (GET_CODE (args_so_far) == CONST_INT)
1600 temp = memory_address (BLKmode,
1601 plus_constant (args_addr,
1602 skip + INTVAL (args_so_far)));
1603 else
1604 temp = memory_address (BLKmode,
1605 plus_constant (gen_rtx (PLUS, Pmode,
1606 args_addr, args_so_far),
1607 skip));
1608
1609 /* TEMP is the address of the block. Copy the data there. */
1610 if (GET_CODE (size) == CONST_INT
1611 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1612 < MOVE_RATIO))
1613 {
1614 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1615 INTVAL (size), align);
1616 goto ret;
1617 }
1618 /* Try the most limited insn first, because there's no point
1619 including more than one in the machine description unless
1620 the more limited one has some advantage. */
1621 #ifdef HAVE_movstrqi
1622 if (HAVE_movstrqi
1623 && GET_CODE (size) == CONST_INT
1624 && ((unsigned) INTVAL (size)
1625 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1626 {
1627 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1628 xinner, size, GEN_INT (align)));
1629 goto ret;
1630 }
1631 #endif
1632 #ifdef HAVE_movstrhi
1633 if (HAVE_movstrhi
1634 && GET_CODE (size) == CONST_INT
1635 && ((unsigned) INTVAL (size)
1636 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1637 {
1638 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1639 xinner, size, GEN_INT (align)));
1640 goto ret;
1641 }
1642 #endif
1643 #ifdef HAVE_movstrsi
1644 if (HAVE_movstrsi)
1645 {
1646 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1647 xinner, size, GEN_INT (align)));
1648 goto ret;
1649 }
1650 #endif
1651 #ifdef HAVE_movstrdi
1652 if (HAVE_movstrdi)
1653 {
1654 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1655 xinner, size, GEN_INT (align)));
1656 goto ret;
1657 }
1658 #endif
1659
1660 #ifndef ACCUMULATE_OUTGOING_ARGS
1661 /* If the source is referenced relative to the stack pointer,
1662 copy it to another register to stabilize it. We do not need
1663 to do this if we know that we won't be changing sp. */
1664
1665 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1666 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1667 temp = copy_to_reg (temp);
1668 #endif
1669
1670 /* Make inhibit_defer_pop nonzero around the library call
1671 to force it to pop the bcopy-arguments right away. */
1672 NO_DEFER_POP;
1673 #ifdef TARGET_MEM_FUNCTIONS
1674 emit_library_call (memcpy_libfunc, 1,
1675 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1676 size, Pmode);
1677 #else
1678 emit_library_call (bcopy_libfunc, 1,
1679 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1680 size, Pmode);
1681 #endif
1682 OK_DEFER_POP;
1683 }
1684 }
1685 else if (partial > 0)
1686 {
1687 /* Scalar partly in registers. */
1688
1689 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1690 int i;
1691 int not_stack;
1692 /* # words of start of argument
1693 that we must make space for but need not store. */
1694 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1695 int args_offset = INTVAL (args_so_far);
1696 int skip;
1697
1698 /* Push padding now if padding above and stack grows down,
1699 or if padding below and stack grows up.
1700 But if space already allocated, this has already been done. */
1701 if (extra && args_addr == 0
1702 && where_pad != none && where_pad != stack_direction)
1703 anti_adjust_stack (GEN_INT (extra));
1704
1705 /* If we make space by pushing it, we might as well push
1706 the real data. Otherwise, we can leave OFFSET nonzero
1707 and leave the space uninitialized. */
1708 if (args_addr == 0)
1709 offset = 0;
1710
1711 /* Now NOT_STACK gets the number of words that we don't need to
1712 allocate on the stack. */
1713 not_stack = partial - offset;
1714
1715 /* If the partial register-part of the arg counts in its stack size,
1716 skip the part of stack space corresponding to the registers.
1717 Otherwise, start copying to the beginning of the stack space,
1718 by setting SKIP to 0. */
1719 #ifndef REG_PARM_STACK_SPACE
1720 skip = 0;
1721 #else
1722 skip = not_stack;
1723 #endif
1724
1725 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1726 x = validize_mem (force_const_mem (mode, x));
1727
1728 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1729 SUBREGs of such registers are not allowed. */
1730 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1731 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1732 x = copy_to_reg (x);
1733
1734 /* Loop over all the words allocated on the stack for this arg. */
1735 /* We can do it by words, because any scalar bigger than a word
1736 has a size a multiple of a word. */
1737 #ifndef PUSH_ARGS_REVERSED
1738 for (i = not_stack; i < size; i++)
1739 #else
1740 for (i = size - 1; i >= not_stack; i--)
1741 #endif
1742 if (i >= not_stack + offset)
1743 emit_push_insn (operand_subword_force (x, i, mode),
1744 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1745 0, args_addr,
1746 GEN_INT (args_offset + ((i - not_stack + skip)
1747 * UNITS_PER_WORD)));
1748 }
1749 else
1750 {
1751 rtx addr;
1752
1753 /* Push padding now if padding above and stack grows down,
1754 or if padding below and stack grows up.
1755 But if space already allocated, this has already been done. */
1756 if (extra && args_addr == 0
1757 && where_pad != none && where_pad != stack_direction)
1758 anti_adjust_stack (GEN_INT (extra));
1759
1760 #ifdef PUSH_ROUNDING
1761 if (args_addr == 0)
1762 addr = gen_push_operand ();
1763 else
1764 #endif
1765 if (GET_CODE (args_so_far) == CONST_INT)
1766 addr
1767 = memory_address (mode,
1768 plus_constant (args_addr, INTVAL (args_so_far)));
1769 else
1770 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1771 args_so_far));
1772
1773 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1774 }
1775
1776 ret:
1777 /* If part should go in registers, copy that part
1778 into the appropriate registers. Do this now, at the end,
1779 since mem-to-mem copies above may do function calls. */
1780 if (partial > 0)
1781 move_block_to_reg (REGNO (reg), x, partial, mode);
1782
1783 if (extra && args_addr == 0 && where_pad == stack_direction)
1784 anti_adjust_stack (GEN_INT (extra));
1785 }
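
/* Illustrative sketch (assumed usage): pushing an SImode scalar argument
   entirely on the stack, with no partial-register part, no extra
   padding, and mode-sized alignment.  */
#if 0
static void
example_push_arg (val, args_addr, args_so_far)
     rtx val, args_addr, args_so_far;
{
  emit_push_insn (val, SImode, NULL_TREE, NULL_RTX,
		  GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
		  args_addr, args_so_far);
}
#endif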
1786 \f
1787 /* Output a library call to function FUN (a SYMBOL_REF rtx)
1788 (emitting the queue unless NO_QUEUE is nonzero),
1789 for a value of mode OUTMODE,
1790 with NARGS different arguments, passed as alternating rtx values
1791 and machine_modes to convert them to.
1792 The rtx values should have been passed through protect_from_queue already.
1793
1794 NO_QUEUE will be true if and only if the library call is a `const' call
1795 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1796 to the variable is_const in expand_call. */
1797
1798 void
1799 emit_library_call (va_alist)
1800 va_dcl
1801 {
1802 va_list p;
1803 struct args_size args_size;
1804 register int argnum;
1805 enum machine_mode outmode;
1806 int nargs;
1807 rtx fun;
1808 rtx orgfun;
1809 int inc;
1810 int count;
1811 rtx argblock = 0;
1812 CUMULATIVE_ARGS args_so_far;
1813 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1814 struct args_size offset; struct args_size size; };
1815 struct arg *argvec;
1816 int old_inhibit_defer_pop = inhibit_defer_pop;
1817 int no_queue = 0;
1818 rtx use_insns;
1819
1820 va_start (p);
1821 orgfun = fun = va_arg (p, rtx);
1822 no_queue = va_arg (p, int);
1823 outmode = va_arg (p, enum machine_mode);
1824 nargs = va_arg (p, int);
1825
1826 /* Copy all the libcall-arguments out of the varargs data
1827 and into a vector ARGVEC.
1828
1829 Compute how to pass each argument. We only support a very small subset
1830 of the full argument passing conventions to limit complexity here since
1831 library functions shouldn't have many args. */
1832
1833 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1834
1835 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1836
1837 args_size.constant = 0;
1838 args_size.var = 0;
1839
1840 for (count = 0; count < nargs; count++)
1841 {
1842 rtx val = va_arg (p, rtx);
1843 enum machine_mode mode = va_arg (p, enum machine_mode);
1844
1845 /* We cannot convert the arg value to the mode the library wants here;
1846 must do it earlier where we know the signedness of the arg. */
1847 if (mode == BLKmode
1848 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1849 abort ();
1850
1851 /* On some machines, there's no way to pass a float to a library fcn.
1852 Pass it as a double instead. */
1853 #ifdef LIBGCC_NEEDS_DOUBLE
1854 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
855	      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
856		val = convert_to_mode (DFmode, val, 0), mode = DFmode;
857	#endif
1857
1858 /* There's no need to call protect_from_queue, because
1859 either emit_move_insn or emit_push_insn will do that. */
1860
1861 /* Make sure it is a reasonable operand for a move or push insn. */
1862 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1863 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1864 val = force_operand (val, NULL_RTX);
1865
1866 argvec[count].value = val;
1867 argvec[count].mode = mode;
1868
1869 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1870 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1871 abort ();
1872 #endif
1873
1874 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1875 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1876 abort ();
1877 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1878 argvec[count].partial
1879 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1880 #else
1881 argvec[count].partial = 0;
1882 #endif
1883
1884 locate_and_pad_parm (mode, NULL_TREE,
1885 argvec[count].reg && argvec[count].partial == 0,
1886 NULL_TREE, &args_size, &argvec[count].offset,
1887 &argvec[count].size);
1888
1889 if (argvec[count].size.var)
1890 abort ();
1891
1892 #ifndef REG_PARM_STACK_SPACE
1893 if (argvec[count].partial)
1894 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1895 #endif
1896
1897 if (argvec[count].reg == 0 || argvec[count].partial != 0
1898 #ifdef REG_PARM_STACK_SPACE
1899 || 1
1900 #endif
1901 )
1902 args_size.constant += argvec[count].size.constant;
1903
1904 #ifdef ACCUMULATE_OUTGOING_ARGS
1905 /* If this arg is actually passed on the stack, it might be
1906 clobbering something we already put there (this library call might
1907 be inside the evaluation of an argument to a function whose call
1908 requires the stack). This will only occur when the library call
1909 has sufficient args to run out of argument registers. Abort in
1910 this case; if this ever occurs, code must be added to save and
1911 restore the arg slot. */
1912
1913 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1914 abort ();
1915 #endif
1916
1917 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1918 }
1919 va_end (p);
1920
1921 /* If this machine requires an external definition for library
1922 functions, write one out. */
1923 assemble_external_libcall (fun);
1924
1925 #ifdef STACK_BOUNDARY
1926 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1927 / STACK_BYTES) * STACK_BYTES);
1928 #endif
1929
1930 #ifdef REG_PARM_STACK_SPACE
1931 args_size.constant = MAX (args_size.constant,
1932 REG_PARM_STACK_SPACE ((tree) 0));
1933 #endif
1934
1935 #ifdef ACCUMULATE_OUTGOING_ARGS
1936 if (args_size.constant > current_function_outgoing_args_size)
1937 current_function_outgoing_args_size = args_size.constant;
1938 args_size.constant = 0;
1939 #endif
1940
1941 #ifndef PUSH_ROUNDING
1942 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
1943 #endif
1944
1945 #ifdef PUSH_ARGS_REVERSED
1946 inc = -1;
1947 argnum = nargs - 1;
1948 #else
1949 inc = 1;
1950 argnum = 0;
1951 #endif
1952
1953 /* Push the args that need to be pushed. */
1954
1955 for (count = 0; count < nargs; count++, argnum += inc)
1956 {
1957 register enum machine_mode mode = argvec[argnum].mode;
1958 register rtx val = argvec[argnum].value;
1959 rtx reg = argvec[argnum].reg;
1960 int partial = argvec[argnum].partial;
1961
1962 if (! (reg != 0 && partial == 0))
1963 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
1964 argblock, GEN_INT (argvec[argnum].offset.constant));
1965 NO_DEFER_POP;
1966 }
1967
1968 #ifdef PUSH_ARGS_REVERSED
1969 argnum = nargs - 1;
1970 #else
1971 argnum = 0;
1972 #endif
1973
1974 /* Now load any reg parms into their regs. */
1975
1976 for (count = 0; count < nargs; count++, argnum += inc)
1977 {
1978 register enum machine_mode mode = argvec[argnum].mode;
1979 register rtx val = argvec[argnum].value;
1980 rtx reg = argvec[argnum].reg;
1981 int partial = argvec[argnum].partial;
1982
1983 if (reg != 0 && partial == 0)
1984 emit_move_insn (reg, val);
1985 NO_DEFER_POP;
1986 }
1987
1988 /* For version 1.37, try deleting this entirely. */
1989 if (! no_queue)
1990 emit_queue ();
1991
1992 /* Any regs containing parms remain in use through the call. */
1993 start_sequence ();
1994 for (count = 0; count < nargs; count++)
1995 if (argvec[count].reg != 0)
1996 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
1997
1998 use_insns = get_insns ();
1999 end_sequence ();
2000
2001 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2002
2003 /* Don't allow popping to be deferred, since then
2004 cse'ing of library calls could delete a call and leave the pop. */
2005 NO_DEFER_POP;
2006
2007 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2008 will set inhibit_defer_pop to that value. */
2009
2010 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2011 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2012 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2013 old_inhibit_defer_pop + 1, use_insns, no_queue);
2014
2015 /* Now restore inhibit_defer_pop to its actual original value. */
2016 OK_DEFER_POP;
2017 }
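/* Example usage: the structure-return copy in expand_assignment below
   invokes this varargs libcall emitter as

       emit_library_call (memcpy_libfunc, 1,
                          VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                          XEXP (from_rtx, 0), Pmode,
                          size, Pmode);

   that is: the function rtx, the NO_QUEUE flag, the return mode, the
   argument count, and then one value/mode pair per argument.  */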
2018 \f
2019 /* Expand an assignment that stores the value of FROM into TO.
2020 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2021 (This may contain a QUEUED rtx.)
2022 Otherwise, the returned value is not meaningful.
2023
2024 SUGGEST_REG is no longer actually used.
2025 It used to mean, copy the value through a register
2026 and return that register, if that is possible.
2027 But now we do this if WANT_VALUE.
2028
2029 If the value stored is a constant, we return the constant. */
2030
2031 rtx
2032 expand_assignment (to, from, want_value, suggest_reg)
2033 tree to, from;
2034 int want_value;
2035 int suggest_reg;
2036 {
2037 register rtx to_rtx = 0;
2038 rtx result;
2039
2040 /* Don't crash if the lhs of the assignment was erroneous. */
2041
2042 if (TREE_CODE (to) == ERROR_MARK)
2043 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2044
2045 /* Assignment of a structure component needs special treatment
2046 if the structure component's rtx is not simply a MEM.
2047 Assignment of an array element at a constant index
2048 has the same problem. */
2049
2050 if (TREE_CODE (to) == COMPONENT_REF
2051 || TREE_CODE (to) == BIT_FIELD_REF
2052 || (TREE_CODE (to) == ARRAY_REF
2053 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2054 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2055 {
2056 enum machine_mode mode1;
2057 int bitsize;
2058 int bitpos;
2059 tree offset;
2060 int unsignedp;
2061 int volatilep = 0;
2062 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2063 &mode1, &unsignedp, &volatilep);
2064
2065 /* If we are going to use store_bit_field and extract_bit_field,
2066 make sure to_rtx will be safe for multiple use. */
2067
2068 if (mode1 == VOIDmode && want_value)
2069 tem = stabilize_reference (tem);
2070
2071 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2072 if (offset != 0)
2073 {
2074 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2075
2076 if (GET_CODE (to_rtx) != MEM)
2077 abort ();
2078 to_rtx = change_address (to_rtx, VOIDmode,
2079 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2080 force_reg (Pmode, offset_rtx)));
2081 }
2082 if (volatilep)
2083 {
2084 if (GET_CODE (to_rtx) == MEM)
2085 MEM_VOLATILE_P (to_rtx) = 1;
2086 #if 0 /* This was turned off because, when a field is volatile
2087 in an object which is not volatile, the object may be in a register,
2088 and then we would abort over here. */
2089 else
2090 abort ();
2091 #endif
2092 }
2093
2094 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2095 (want_value
2096 /* Spurious cast makes HPUX compiler happy. */
2097 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2098 : VOIDmode),
2099 unsignedp,
2100 /* Required alignment of containing datum. */
2101 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2102 int_size_in_bytes (TREE_TYPE (tem)));
2103 preserve_temp_slots (result);
2104 free_temp_slots ();
2105
2106 return result;
2107 }
2108
2109 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2110 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2111
2112 if (to_rtx == 0)
2113 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2114
2115 /* In case we are returning the contents of an object which overlaps
2116 the place the value is being stored, use a safe function when copying
2117 a value through a pointer into a structure value return block. */
2118 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2119 && current_function_returns_struct
2120 && !current_function_returns_pcc_struct)
2121 {
2122 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2123 rtx size = expr_size (from);
2124
2125 #ifdef TARGET_MEM_FUNCTIONS
2126 emit_library_call (memcpy_libfunc, 1,
2127 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2128 XEXP (from_rtx, 0), Pmode,
2129 size, Pmode);
2130 #else
2131 emit_library_call (bcopy_libfunc, 1,
2132 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2133 XEXP (to_rtx, 0), Pmode,
2134 size, Pmode);
2135 #endif
2136
2137 preserve_temp_slots (to_rtx);
2138 free_temp_slots ();
2139 return to_rtx;
2140 }
2141
2142 /* Compute FROM and store the value in the rtx we got. */
2143
2144 result = store_expr (from, to_rtx, want_value);
2145 preserve_temp_slots (result);
2146 free_temp_slots ();
2147 return result;
2148 }
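/* Example: for an assignment such as

       s.f = x;

   where F is a bit-field member, the COMPONENT_REF branch above uses
   get_inner_reference to find the containing object and the bit size
   and position of F, then store_field to do the store (possibly via
   store_bit_field).  A simple assignment like "y = x" instead falls
   through to store_expr.  S, F, X and Y are hypothetical names.  */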
2149
2150 /* Generate code for computing expression EXP,
2151 and storing the value into TARGET.
2152 Returns TARGET or an equivalent value.
2153 TARGET may contain a QUEUED rtx.
2154
2155 If SUGGEST_REG is nonzero, copy the value through a register
2156 and return that register, if that is possible.
2157
2158 If the value stored is a constant, we return the constant. */
2159
2160 rtx
2161 store_expr (exp, target, suggest_reg)
2162 register tree exp;
2163 register rtx target;
2164 int suggest_reg;
2165 {
2166 register rtx temp;
2167 int dont_return_target = 0;
2168
2169 if (TREE_CODE (exp) == COMPOUND_EXPR)
2170 {
2171 /* Perform first part of compound expression, then assign from second
2172 part. */
2173 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2174 emit_queue ();
2175 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2176 }
2177 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2178 {
2179 /* For conditional expression, get safe form of the target. Then
2180 test the condition, doing the appropriate assignment on either
2181 side. This avoids the creation of unnecessary temporaries.
2182 For non-BLKmode, it is more efficient not to do this. */
2183
2184 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2185
2186 emit_queue ();
2187 target = protect_from_queue (target, 1);
2188
2189 NO_DEFER_POP;
2190 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2191 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2192 emit_queue ();
2193 emit_jump_insn (gen_jump (lab2));
2194 emit_barrier ();
2195 emit_label (lab1);
2196 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2197 emit_queue ();
2198 emit_label (lab2);
2199 OK_DEFER_POP;
2200 return target;
2201 }
2202 else if (suggest_reg && GET_CODE (target) == MEM
2203 && GET_MODE (target) != BLKmode)
2204 /* If target is in memory and caller wants value in a register instead,
2205 arrange that. Pass TARGET as target for expand_expr so that,
2206 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2207 We know expand_expr will not use the target in that case. */
2208 {
2209 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2210 GET_MODE (target), 0);
2211 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2212 temp = copy_to_reg (temp);
2213 dont_return_target = 1;
2214 }
2215 else if (queued_subexp_p (target))
2216 /* If target contains a postincrement, it is not safe
2217 to use as the returned value. It would access the wrong
2218 place by the time the queued increment gets output.
2219 So copy the value through a temporary and use that temp
2220 as the result. */
2221 {
2222 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2223 {
2224 /* Expand EXP into a new pseudo. */
2225 temp = gen_reg_rtx (GET_MODE (target));
2226 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2227 }
2228 else
2229 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2230 dont_return_target = 1;
2231 }
2232 else
2233 {
2234 temp = expand_expr (exp, target, GET_MODE (target), 0);
2235 /* DO return TARGET if it's a specified hardware register.
2236 expand_return relies on this. */
2237 if (!(target && GET_CODE (target) == REG
2238 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2239 && CONSTANT_P (temp))
2240 dont_return_target = 1;
2241 }
2242
2243 /* If value was not generated in the target, store it there.
2244 Convert the value to TARGET's type first if necessary. */
2245
2246 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2247 {
2248 target = protect_from_queue (target, 1);
2249 if (GET_MODE (temp) != GET_MODE (target)
2250 && GET_MODE (temp) != VOIDmode)
2251 {
2252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2253 if (dont_return_target)
2254 {
2255 /* In this case, we will return TEMP,
2256 so make sure it has the proper mode.
2257 But don't forget to store the value into TARGET. */
2258 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2259 emit_move_insn (target, temp);
2260 }
2261 else
2262 convert_move (target, temp, unsignedp);
2263 }
2264
2265 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2266 {
2267 /* Handle copying a string constant into an array.
2268 The string constant may be shorter than the array.
2269 So copy just the string's actual length, and clear the rest. */
2270 rtx size;
2271
2272 /* Get the size of the data type of the string,
2273 which is actually the size of the target. */
2274 size = expr_size (exp);
2275 if (GET_CODE (size) == CONST_INT
2276 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2277 emit_block_move (target, temp, size,
2278 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2279 else
2280 {
2281 /* Compute the size of the data to copy from the string. */
2282 tree copy_size
2283 = fold (build (MIN_EXPR, sizetype,
2284 size_binop (CEIL_DIV_EXPR,
2285 TYPE_SIZE (TREE_TYPE (exp)),
2286 size_int (BITS_PER_UNIT)),
2287 convert (sizetype,
2288 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2289 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2290 VOIDmode, 0);
2291 rtx label = 0;
2292
2293 /* Copy that much. */
2294 emit_block_move (target, temp, copy_size_rtx,
2295 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2296
2297 /* Figure out how much is left in TARGET
2298 that we have to clear. */
2299 if (GET_CODE (copy_size_rtx) == CONST_INT)
2300 {
2301 temp = plus_constant (XEXP (target, 0),
2302 TREE_STRING_LENGTH (exp));
2303 size = plus_constant (size,
2304 - TREE_STRING_LENGTH (exp));
2305 }
2306 else
2307 {
2308 enum machine_mode size_mode = Pmode;
2309
2310 temp = force_reg (Pmode, XEXP (target, 0));
2311 temp = expand_binop (size_mode, add_optab, temp,
2312 copy_size_rtx, NULL_RTX, 0,
2313 OPTAB_LIB_WIDEN);
2314
2315 size = expand_binop (size_mode, sub_optab, size,
2316 copy_size_rtx, NULL_RTX, 0,
2317 OPTAB_LIB_WIDEN);
2318
2319 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2320 GET_MODE (size), 0, 0);
2321 label = gen_label_rtx ();
2322 emit_jump_insn (gen_blt (label));
2323 }
2324
2325 if (size != const0_rtx)
2326 {
2327 #ifdef TARGET_MEM_FUNCTIONS
2328 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2329 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2330 #else
2331 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2332 temp, Pmode, size, Pmode);
2333 #endif
2334 }
2335 if (label)
2336 emit_label (label);
2337 }
2338 }
2339 else if (GET_MODE (temp) == BLKmode)
2340 emit_block_move (target, temp, expr_size (exp),
2341 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2342 else
2343 emit_move_insn (target, temp);
2344 }
2345 if (dont_return_target)
2346 return temp;
2347 return target;
2348 }
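/* Example: the STRING_CST branch above handles initializations such as

       char buf[8] = "abc";

   by block-moving the string's TREE_STRING_LENGTH bytes (4 here,
   counting the terminating null) and then clearing the remaining
   4 bytes with a memset (or bzero) library call.  BUF is a
   hypothetical array.  */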
2349 \f
2350 /* Store the value of constructor EXP into the rtx TARGET.
2351 TARGET is either a REG or a MEM. */
2352
2353 static void
2354 store_constructor (exp, target)
2355 tree exp;
2356 rtx target;
2357 {
2358 tree type = TREE_TYPE (exp);
2359
2360 /* We know our target cannot conflict, since safe_from_p has been called. */
2361 #if 0
2362 /* Don't try copying piece by piece into a hard register
2363 since that is vulnerable to being clobbered by EXP.
2364 Instead, construct in a pseudo register and then copy it all. */
2365 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2366 {
2367 rtx temp = gen_reg_rtx (GET_MODE (target));
2368 store_constructor (exp, temp);
2369 emit_move_insn (target, temp);
2370 return;
2371 }
2372 #endif
2373
2374 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2375 {
2376 register tree elt;
2377
2378 /* Inform later passes that the whole union value is dead. */
2379 if (TREE_CODE (type) == UNION_TYPE)
2380 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2381
2382 /* If we are building a static constructor into a register,
2383 set the initial value as zero so we can fold the value into
2384 a constant. */
2385 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2386 emit_move_insn (target, const0_rtx);
2387
2388 /* If the constructor has fewer fields than the structure,
2389 clear the whole structure first. */
2390 else if (list_length (CONSTRUCTOR_ELTS (exp))
2391 != list_length (TYPE_FIELDS (type)))
2392 clear_storage (target, int_size_in_bytes (type));
2393 else
2394 /* Inform later passes that the old value is dead. */
2395 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2396
2397 /* Store each element of the constructor into
2398 the corresponding field of TARGET. */
2399
2400 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2401 {
2402 register tree field = TREE_PURPOSE (elt);
2403 register enum machine_mode mode;
2404 int bitsize;
2405 int bitpos;
2406 int unsignedp;
2407
2408 /* Just ignore missing fields.
2409 We cleared the whole structure, above,
2410 if any fields are missing. */
2411 if (field == 0)
2412 continue;
2413
2414 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2415 unsignedp = TREE_UNSIGNED (field);
2416 mode = DECL_MODE (field);
2417 if (DECL_BIT_FIELD (field))
2418 mode = VOIDmode;
2419
2420 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2421 /* ??? This case remains to be written. */
2422 abort ();
2423
2424 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2425
2426 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2427 /* The alignment of TARGET is
2428 at least what its type requires. */
2429 VOIDmode, 0,
2430 TYPE_ALIGN (type) / BITS_PER_UNIT,
2431 int_size_in_bytes (type));
2432 }
2433 }
2434 else if (TREE_CODE (type) == ARRAY_TYPE)
2435 {
2436 register tree elt;
2437 register int i;
2438 tree domain = TYPE_DOMAIN (type);
2439 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2440 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2441 tree elttype = TREE_TYPE (type);
2442
2443 /* If the constructor has fewer fields than the structure,
2444 clear the whole structure first. Similarly if this is a
2445 static constructor of a non-BLKmode object. */
2446
2447 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2448 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2449 clear_storage (target, maxelt - minelt + 1);
2450 else
2451 /* Inform later passes that the old value is dead. */
2452 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2453
2454 /* Store each element of the constructor into
2455 the corresponding element of TARGET, determined
2456 by counting the elements. */
2457 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2458 elt;
2459 elt = TREE_CHAIN (elt), i++)
2460 {
2461 register enum machine_mode mode;
2462 int bitsize;
2463 int bitpos;
2464 int unsignedp;
2465
2466 mode = TYPE_MODE (elttype);
2467 bitsize = GET_MODE_BITSIZE (mode);
2468 unsignedp = TREE_UNSIGNED (elttype);
2469
2470 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2471
2472 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2473 /* The alignment of TARGET is
2474 at least what its type requires. */
2475 VOIDmode, 0,
2476 TYPE_ALIGN (type) / BITS_PER_UNIT,
2477 int_size_in_bytes (type));
2478 }
2479 }
2480
2481 else
2482 abort ();
2483 }
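/* Example: for an aggregate initializer with fewer elements than the
   type has fields, such as

       struct pt { int x, y; } p = { 1 };

   the record case above first clears the whole object with
   clear_storage and then stores each given element with store_field;
   a complete initializer skips the clearing and merely clobbers the
   old value.  PT and P are hypothetical names.  */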
2484
2485 /* Store the value of EXP (an expression tree)
2486 into a subfield of TARGET which has mode MODE and occupies
2487 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2488 If MODE is VOIDmode, it means that we are storing into a bit-field.
2489
2490 If VALUE_MODE is VOIDmode, return nothing in particular.
2491 UNSIGNEDP is not used in this case.
2492
2493 Otherwise, return an rtx for the value stored. This rtx
2494 has mode VALUE_MODE if that is convenient to do.
2495 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2496
2497 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2498 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2499
2500 static rtx
2501 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2502 unsignedp, align, total_size)
2503 rtx target;
2504 int bitsize, bitpos;
2505 enum machine_mode mode;
2506 tree exp;
2507 enum machine_mode value_mode;
2508 int unsignedp;
2509 int align;
2510 int total_size;
2511 {
2512 HOST_WIDE_INT width_mask = 0;
2513
2514 if (bitsize < HOST_BITS_PER_WIDE_INT)
2515 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2516
2517 /* If we are storing into an unaligned field of an aligned union that is
2518 in a register, we may have the mode of TARGET being an integer mode but
2519 MODE == BLKmode. In that case, get an aligned object whose size and
2520 alignment are the same as TARGET and store TARGET into it (we can avoid
2521 the store if the field being stored is the entire width of TARGET). Then
2522 call ourselves recursively to store the field into a BLKmode version of
2523 that object. Finally, load from the object into TARGET. This is not
2524 very efficient in general, but should only be slightly more expensive
2525 than the otherwise-required unaligned accesses. Perhaps this can be
2526 cleaned up later. */
2527
2528 if (mode == BLKmode
2529 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2530 {
2531 rtx object = assign_stack_temp (GET_MODE (target),
2532 GET_MODE_SIZE (GET_MODE (target)), 0);
2533 rtx blk_object = copy_rtx (object);
2534
2535 PUT_MODE (blk_object, BLKmode);
2536
2537 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2538 emit_move_insn (object, target);
2539
2540 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2541 align, total_size);
2542
2543 emit_move_insn (target, object);
2544
2545 return target;
2546 }
2547
2548 /* If the structure is in a register or if the component
2549 is a bit field, we cannot use addressing to access it.
2550 Use bit-field techniques or SUBREG to store in it. */
2551
2552 if (mode == VOIDmode
2553 || (mode != BLKmode && ! direct_store[(int) mode])
2554 || GET_CODE (target) == REG
2555 || GET_CODE (target) == SUBREG)
2556 {
2557 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2558 /* Store the value in the bitfield. */
2559 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2560 if (value_mode != VOIDmode)
2561 {
2562 /* The caller wants an rtx for the value. */
2563 /* If possible, avoid refetching from the bitfield itself. */
2564 if (width_mask != 0
2565 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2566 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2567 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2568 NULL_RTX, value_mode, 0, align,
2569 total_size);
2570 }
2571 return const0_rtx;
2572 }
2573 else
2574 {
2575 rtx addr = XEXP (target, 0);
2576 rtx to_rtx;
2577
2578 /* If a value is wanted, it must be the lhs;
2579 so make the address stable for multiple use. */
2580
2581 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2582 && ! CONSTANT_ADDRESS_P (addr)
2583 /* A frame-pointer reference is already stable. */
2584 && ! (GET_CODE (addr) == PLUS
2585 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2586 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2587 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2588 addr = copy_to_reg (addr);
2589
2590 /* Now build a reference to just the desired component. */
2591
2592 to_rtx = change_address (target, mode,
2593 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2594 MEM_IN_STRUCT_P (to_rtx) = 1;
2595
2596 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2597 }
2598 }
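/* Example: storing a value into a 3-bit field at bit position 8 of a
   structure in memory takes the first branch above, calling

       store_bit_field (target, 3, 8, VOIDmode, temp, align, total_size);

   and, when the caller wants the stored value back, returns
   TEMP & 7 via expand_and (WIDTH_MASK is (1 << 3) - 1 here) rather
   than re-extracting the bit-field.  */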
2599 \f
2600 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2601 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2602 ARRAY_REFs at constant positions and find the ultimate containing object,
2603 which we return.
2604
2605 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2606 bit position, and *PUNSIGNEDP to the signedness of the field.
2607 If the position of the field is variable, we store a tree
2608 giving the variable offset (in units) in *POFFSET.
2609 This offset is in addition to the bit position.
2610 If the position is not variable, we store 0 in *POFFSET.
2611
2612 If any of the extraction expressions is volatile,
2613 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2614
2615 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2616 is a mode that can be used to access the field. In that case, *PBITSIZE
2617 is redundant.
2618
2619 If the field describes a variable-sized object, *PMODE is set to
2620 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2621 this case, but the address of the object can be found. */
2622
2623 tree
2624 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2625 tree exp;
2626 int *pbitsize;
2627 int *pbitpos;
2628 tree *poffset;
2629 enum machine_mode *pmode;
2630 int *punsignedp;
2631 int *pvolatilep;
2632 {
2633 tree size_tree = 0;
2634 enum machine_mode mode = VOIDmode;
2635 tree offset = 0;
2636
2637 if (TREE_CODE (exp) == COMPONENT_REF)
2638 {
2639 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2640 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2641 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2642 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2643 }
2644 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2645 {
2646 size_tree = TREE_OPERAND (exp, 1);
2647 *punsignedp = TREE_UNSIGNED (exp);
2648 }
2649 else
2650 {
2651 mode = TYPE_MODE (TREE_TYPE (exp));
2652 *pbitsize = GET_MODE_BITSIZE (mode);
2653 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2654 }
2655
2656 if (size_tree)
2657 {
2658 if (TREE_CODE (size_tree) != INTEGER_CST)
2659 mode = BLKmode, *pbitsize = -1;
2660 else
2661 *pbitsize = TREE_INT_CST_LOW (size_tree);
2662 }
2663
2664 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2665 and find the ultimate containing object. */
2666
2667 *pbitpos = 0;
2668
2669 while (1)
2670 {
2671 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2672 {
2673 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2674 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2675 : TREE_OPERAND (exp, 2));
2676
2677 if (TREE_CODE (pos) == PLUS_EXPR)
2678 {
2679 tree constant, var;
2680 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2681 {
2682 constant = TREE_OPERAND (pos, 0);
2683 var = TREE_OPERAND (pos, 1);
2684 }
2685 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2686 {
2687 constant = TREE_OPERAND (pos, 1);
2688 var = TREE_OPERAND (pos, 0);
2689 }
2690 else
2691 abort ();
2692 *pbitpos += TREE_INT_CST_LOW (constant);
2693 if (offset)
2694 offset = size_binop (PLUS_EXPR, offset,
2695 size_binop (FLOOR_DIV_EXPR, var,
2696 size_int (BITS_PER_UNIT)));
2697 else
2698 offset = size_binop (FLOOR_DIV_EXPR, var,
2699 size_int (BITS_PER_UNIT));
2700 }
2701 else if (TREE_CODE (pos) == INTEGER_CST)
2702 *pbitpos += TREE_INT_CST_LOW (pos);
2703 else
2704 {
2705 /* Assume here that the offset is a multiple of a unit.
2706 If not, there should be an explicitly added constant. */
2707 if (offset)
2708 offset = size_binop (PLUS_EXPR, offset,
2709 size_binop (FLOOR_DIV_EXPR, pos,
2710 size_int (BITS_PER_UNIT)));
2711 else
2712 offset = size_binop (FLOOR_DIV_EXPR, pos,
2713 size_int (BITS_PER_UNIT));
2714 }
2715 }
2716
2717 else if (TREE_CODE (exp) == ARRAY_REF
2718 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2719 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2720 {
2721 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2722 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2723 }
2724 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2725 && ! ((TREE_CODE (exp) == NOP_EXPR
2726 || TREE_CODE (exp) == CONVERT_EXPR)
2727 && (TYPE_MODE (TREE_TYPE (exp))
2728 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2729 break;
2730
2731 /* If any reference in the chain is volatile, the effect is volatile. */
2732 if (TREE_THIS_VOLATILE (exp))
2733 *pvolatilep = 1;
2734 exp = TREE_OPERAND (exp, 0);
2735 }
2736
2737 /* If this was a bit-field, see if there is a mode that allows direct
2738 access in case EXP is in memory. */
2739 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2740 {
2741 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2742 if (mode == BLKmode)
2743 mode = VOIDmode;
2744 }
2745
2746 *pmode = mode;
2747 *poffset = offset;
2748 #if 0
2749 /* We aren't finished fixing the callers to really handle nonzero offset. */
2750 if (offset != 0)
2751 abort ();
2752 #endif
2753
2754 return exp;
2755 }
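/* Example: for a reference such as

       s.a[2].b

   where B sits 16 bits into an element whose type is 64 bits wide,
   the loop above accumulates *PBITPOS as
   16 + 2 * 64 + DECL_FIELD_BITPOS (a) and returns S as the ultimate
   containing object; a field at a variable position would instead
   contribute to *POFFSET.  S, A and B are hypothetical names.  */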
2756 \f
2757 /* Given an rtx VALUE that may contain additions and multiplications,
2758 return an equivalent value that just refers to a register or memory.
2759 This is done by generating instructions to perform the arithmetic
2760 and returning a pseudo-register containing the value. */
2761
2762 rtx
2763 force_operand (value, target)
2764 rtx value, target;
2765 {
2766 register optab binoptab = 0;
2767 /* Use a temporary to force order of execution of calls to
2768 `force_operand'. */
2769 rtx tmp;
2770 register rtx op2;
2771 /* Use subtarget as the target for operand 0 of a binary operation. */
2772 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2773
2774 if (GET_CODE (value) == PLUS)
2775 binoptab = add_optab;
2776 else if (GET_CODE (value) == MINUS)
2777 binoptab = sub_optab;
2778 else if (GET_CODE (value) == MULT)
2779 {
2780 op2 = XEXP (value, 1);
2781 if (!CONSTANT_P (op2)
2782 && !(GET_CODE (op2) == REG && op2 != subtarget))
2783 subtarget = 0;
2784 tmp = force_operand (XEXP (value, 0), subtarget);
2785 return expand_mult (GET_MODE (value), tmp,
2786 force_operand (op2, NULL_RTX),
2787 target, 0);
2788 }
2789
2790 if (binoptab)
2791 {
2792 op2 = XEXP (value, 1);
2793 if (!CONSTANT_P (op2)
2794 && !(GET_CODE (op2) == REG && op2 != subtarget))
2795 subtarget = 0;
2796 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2797 {
2798 binoptab = add_optab;
2799 op2 = negate_rtx (GET_MODE (value), op2);
2800 }
2801
2802 /* Check for an addition with OP2 a constant integer and our first
2803 operand a PLUS of a virtual register and something else. In that
2804 case, we want to emit the sum of the virtual register and the
2805 constant first and then add the other value. This allows virtual
2806 register instantiation to simply modify the constant rather than
2807 creating another one around this addition. */
2808 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2809 && GET_CODE (XEXP (value, 0)) == PLUS
2810 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2811 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2812 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2813 {
2814 rtx temp = expand_binop (GET_MODE (value), binoptab,
2815 XEXP (XEXP (value, 0), 0), op2,
2816 subtarget, 0, OPTAB_LIB_WIDEN);
2817 return expand_binop (GET_MODE (value), binoptab, temp,
2818 force_operand (XEXP (XEXP (value, 0), 1), 0),
2819 target, 0, OPTAB_LIB_WIDEN);
2820 }
2821
2822 tmp = force_operand (XEXP (value, 0), subtarget);
2823 return expand_binop (GET_MODE (value), binoptab, tmp,
2824 force_operand (op2, NULL_RTX),
2825 target, 0, OPTAB_LIB_WIDEN);
2826 /* We give UNSIGNEDP = 0 to expand_binop
2827 because the only operations we are expanding here are signed ones. */
2828 }
2829 return value;
2830 }
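/* Example: given the rtx

       (plus:SI (mult:SI (reg:SI 65) (const_int 4)) (reg:SI 66))

   force_operand expands the MULT through expand_mult and the PLUS
   through expand_binop, returning a pseudo register holding the sum;
   a VALUE that is already a REG or MEM is returned unchanged.
   Registers 65 and 66 are hypothetical pseudos.  */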
2831 \f
2832 /* Subroutine of expand_expr:
2833 save the non-copied parts (LIST) of an expr (LHS), and return a list
2834 which can restore these values to their previous values,
2835 should something modify their storage. */
2836
2837 static tree
2838 save_noncopied_parts (lhs, list)
2839 tree lhs;
2840 tree list;
2841 {
2842 tree tail;
2843 tree parts = 0;
2844
2845 for (tail = list; tail; tail = TREE_CHAIN (tail))
2846 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2847 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2848 else
2849 {
2850 tree part = TREE_VALUE (tail);
2851 tree part_type = TREE_TYPE (part);
2852 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2853 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2854 int_size_in_bytes (part_type), 0);
2855 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2856 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2857 parts = tree_cons (to_be_saved,
2858 build (RTL_EXPR, part_type, NULL_TREE,
2859 (tree) target),
2860 parts);
2861 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2862 }
2863 return parts;
2864 }
2865
2866 /* Subroutine of expand_expr:
2867 record the non-copied parts (LIST) of an expr (LHS), and return a list
2868 which specifies the initial values of these parts. */
2869
2870 static tree
2871 init_noncopied_parts (lhs, list)
2872 tree lhs;
2873 tree list;
2874 {
2875 tree tail;
2876 tree parts = 0;
2877
2878 for (tail = list; tail; tail = TREE_CHAIN (tail))
2879 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2880 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2881 else
2882 {
2883 tree part = TREE_VALUE (tail);
2884 tree part_type = TREE_TYPE (part);
2885 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2886 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2887 }
2888 return parts;
2889 }
2890
2891 /* Subroutine of expand_expr: return nonzero iff there is no way that
2892 EXP can reference X, which is being modified. */
2893
2894 static int
2895 safe_from_p (x, exp)
2896 rtx x;
2897 tree exp;
2898 {
2899 rtx exp_rtl = 0;
2900 int i, nops;
2901
2902 if (x == 0)
2903 return 1;
2904
2905 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
2906 find the underlying pseudo. */
2907 if (GET_CODE (x) == SUBREG)
2908 {
2909 x = SUBREG_REG (x);
2910 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2911 return 0;
2912 }
2913
2914 /* If X is a location in the outgoing argument area, it is always safe. */
2915 if (GET_CODE (x) == MEM
2916 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2917 || (GET_CODE (XEXP (x, 0)) == PLUS
2918 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2919 return 1;
2920
2921 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2922 {
2923 case 'd':
2924 exp_rtl = DECL_RTL (exp);
2925 break;
2926
2927 case 'c':
2928 return 1;
2929
2930 case 'x':
2931 if (TREE_CODE (exp) == TREE_LIST)
2932 return ((TREE_VALUE (exp) == 0
2933 || safe_from_p (x, TREE_VALUE (exp)))
2934 && (TREE_CHAIN (exp) == 0
2935 || safe_from_p (x, TREE_CHAIN (exp))));
2936 else
2937 return 0;
2938
2939 case '1':
2940 return safe_from_p (x, TREE_OPERAND (exp, 0));
2941
2942 case '2':
2943 case '<':
2944 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2945 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2946
2947 case 'e':
2948 case 'r':
2949 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2950 the expression. If it is set, we conflict iff we are that rtx or
2951 both are in memory. Otherwise, we check all operands of the
2952 expression recursively. */
2953
2954 switch (TREE_CODE (exp))
2955 {
2956 case ADDR_EXPR:
2957 return staticp (TREE_OPERAND (exp, 0));
2958
2959 case INDIRECT_REF:
2960 if (GET_CODE (x) == MEM)
2961 return 0;
2962 break;
2963
2964 case CALL_EXPR:
2965 exp_rtl = CALL_EXPR_RTL (exp);
2966 if (exp_rtl == 0)
2967 {
2968 /* Assume that the call will clobber all hard registers and
2969 all of memory. */
2970 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2971 || GET_CODE (x) == MEM)
2972 return 0;
2973 }
2974
2975 break;
2976
2977 case RTL_EXPR:
2978 exp_rtl = RTL_EXPR_RTL (exp);
2979 if (exp_rtl == 0)
2980 /* We don't know what this can modify. */
2981 return 0;
2982
2983 break;
2984
2985 case WITH_CLEANUP_EXPR:
2986 exp_rtl = RTL_EXPR_RTL (exp);
2987 break;
2988
2989 case SAVE_EXPR:
2990 exp_rtl = SAVE_EXPR_RTL (exp);
2991 break;
2992
2993 case BIND_EXPR:
2994 /* The only operand we look at is operand 1. The rest aren't
2995 part of the expression. */
2996 return safe_from_p (x, TREE_OPERAND (exp, 1));
2997
2998 case METHOD_CALL_EXPR:
2999 /* This takes a rtx argument, but shouldn't appear here. */
3000 abort ();
3001 }
3002
3003 /* If we have an rtx, we do not need to scan our operands. */
3004 if (exp_rtl)
3005 break;
3006
3007 nops = tree_code_length[(int) TREE_CODE (exp)];
3008 for (i = 0; i < nops; i++)
3009 if (TREE_OPERAND (exp, i) != 0
3010 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3011 return 0;
3012 }
3013
3014 /* If we have an rtl, find any enclosed object. Then see if we conflict
3015 with it. */
3016 if (exp_rtl)
3017 {
3018 if (GET_CODE (exp_rtl) == SUBREG)
3019 {
3020 exp_rtl = SUBREG_REG (exp_rtl);
3021 if (GET_CODE (exp_rtl) == REG
3022 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3023 return 0;
3024 }
3025
3026 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3027 are memory and EXP is not readonly. */
3028 return ! (rtx_equal_p (x, exp_rtl)
3029 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3030 && ! TREE_READONLY (exp)));
3031 }
3032
3033 /* If we reach here, it is safe. */
3034 return 1;
3035 }
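/* Example: if EXP contains a CALL_EXPR whose CALL_EXPR_RTL has not
   been set, the call is assumed to clobber all hard registers and all
   of memory, so safe_from_p returns 0 whenever X is a MEM or a hard
   register; a pseudo-register X remains safe.  */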
3036
3037 /* Subroutine of expand_expr: return nonzero iff EXP is an
3038 expression whose type is statically determinable. */
3039
3040 static int
3041 fixed_type_p (exp)
3042 tree exp;
3043 {
3044 if (TREE_CODE (exp) == PARM_DECL
3045 || TREE_CODE (exp) == VAR_DECL
3046 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3047 || TREE_CODE (exp) == COMPONENT_REF
3048 || TREE_CODE (exp) == ARRAY_REF)
3049 return 1;
3050 return 0;
3051 }
3052 \f
3053 /* expand_expr: generate code for computing expression EXP.
3054 An rtx for the computed value is returned. The value is never null.
3055 In the case of a void EXP, const0_rtx is returned.
3056
3057 The value may be stored in TARGET if TARGET is nonzero.
3058 TARGET is just a suggestion; callers must assume that
3059 the rtx returned may not be the same as TARGET.
3060
3061 If TARGET is CONST0_RTX, it means that the value will be ignored.
3062
3063 If TMODE is not VOIDmode, it suggests generating the
3064 result in mode TMODE. But this is done only when convenient.
3065 Otherwise, TMODE is ignored and the value generated in its natural mode.
3066 TMODE is just a suggestion; callers must assume that
3067 the rtx returned may not have mode TMODE.
3068
3069 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3070 with a constant address even if that address is not normally legitimate.
3071 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3072
3073 If MODIFIER is EXPAND_SUM then when EXP is an addition
3074 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3075 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3076 products as above, or REG or MEM, or constant.
3077 Ordinarily in such cases we would output mul or add instructions
3078 and then return a pseudo reg containing the sum.
3079
3080 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3081 it also marks a label as absolutely required (it can't be dead).
3082 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3083 This is used for outputting expressions used in initializers. */
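/* Example: expanding the address computation for "a[i]" with
   MODIFIER == EXPAND_SUM may return an rtx such as

       (plus:SI (mult:SI (reg:SI 65) (const_int 4)) (symbol_ref:SI "a"))

   leaving it to the caller (e.g. memory_address) to legitimize,
   instead of emitting the mul and add instructions immediately.
   A, I and register 65 are hypothetical; 4-byte elements assumed.  */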
3084
3085 rtx
3086 expand_expr (exp, target, tmode, modifier)
3087 register tree exp;
3088 rtx target;
3089 enum machine_mode tmode;
3090 enum expand_modifier modifier;
3091 {
3092 register rtx op0, op1, temp;
3093 tree type = TREE_TYPE (exp);
3094 int unsignedp = TREE_UNSIGNED (type);
3095 register enum machine_mode mode = TYPE_MODE (type);
3096 register enum tree_code code = TREE_CODE (exp);
3097 optab this_optab;
3098 /* Use subtarget as the target for operand 0 of a binary operation. */
3099 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3100 rtx original_target = target;
3101 int ignore = target == const0_rtx;
3102 tree context;
3103
3104 /* Don't use hard regs as subtargets, because the combiner
3105 can only handle pseudo regs. */
3106 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3107 subtarget = 0;
3108 /* Avoid subtargets inside loops,
3109 since they hide some invariant expressions. */
3110 if (preserve_subexpressions_p ())
3111 subtarget = 0;
3112
3113 if (ignore) target = 0, original_target = 0;
3114
3115 /* If we will do cse, generate all results into pseudo registers
3116 since 1) that allows cse to find more things
3117 and 2) otherwise cse could produce an insn the machine
3118 cannot support. */
3119
3120 if (! cse_not_expected && mode != BLKmode && target
3121 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3122 target = subtarget;
3123
3124 /* Ensure we reference a volatile object even if value is ignored. */
3125 if (ignore && TREE_THIS_VOLATILE (exp)
3126 && mode != VOIDmode && mode != BLKmode)
3127 {
3128 target = gen_reg_rtx (mode);
3129 temp = expand_expr (exp, target, VOIDmode, modifier);
3130 if (temp != target)
3131 emit_move_insn (target, temp);
3132 return target;
3133 }
3134
3135 switch (code)
3136 {
3137 case LABEL_DECL:
3138 {
3139 tree function = decl_function_context (exp);
3140 /* Handle using a label in a containing function. */
3141 if (function != current_function_decl && function != 0)
3142 {
3143 struct function *p = find_function_data (function);
3144 /* Allocate in the memory associated with the function
3145 that the label is in. */
3146 push_obstacks (p->function_obstack,
3147 p->function_maybepermanent_obstack);
3148
3149 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3150 label_rtx (exp), p->forced_labels);
3151 pop_obstacks ();
3152 }
3153 else if (modifier == EXPAND_INITIALIZER)
3154 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3155 label_rtx (exp), forced_labels);
3156 temp = gen_rtx (MEM, FUNCTION_MODE,
3157 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3158 if (function != current_function_decl && function != 0)
3159 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3160 return temp;
3161 }
3162
3163 case PARM_DECL:
3164 if (DECL_RTL (exp) == 0)
3165 {
3166 error_with_decl (exp, "prior parameter's size depends on `%s'");
3167 return CONST0_RTX (mode);
3168 }
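/* ... fall through ... */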
3169
3170 case FUNCTION_DECL:
3171 case VAR_DECL:
3172 case RESULT_DECL:
3173 if (DECL_RTL (exp) == 0)
3174 abort ();
3175 /* Ensure the variable is marked as used
3176 even if it doesn't go through a parser. */
3177 TREE_USED (exp) = 1;
3178 /* Handle variables inherited from containing functions. */
3179 context = decl_function_context (exp);
3180
3181 /* We treat inline_function_decl as an alias for the current function
3182 because that is the inline function whose vars, types, etc.
3183 are being merged into the current function.
3184 See expand_inline_function. */
3185 if (context != 0 && context != current_function_decl
3186 && context != inline_function_decl
3187 /* If var is static, we don't need a static chain to access it. */
3188 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3189 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3190 {
3191 rtx addr;
3192
3193 /* Mark as non-local and addressable. */
3194 DECL_NONLOCAL (exp) = 1;
3195 mark_addressable (exp);
3196 if (GET_CODE (DECL_RTL (exp)) != MEM)
3197 abort ();
3198 addr = XEXP (DECL_RTL (exp), 0);
3199 if (GET_CODE (addr) == MEM)
3200 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3201 else
3202 addr = fix_lexical_addr (addr, exp);
3203 return change_address (DECL_RTL (exp), mode, addr);
3204 }
3205
3206 /* This is the case of an array whose size is to be determined
3207 from its initializer, while the initializer is still being parsed.
3208 See expand_decl. */
3209 if (GET_CODE (DECL_RTL (exp)) == MEM
3210 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3211 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3212 XEXP (DECL_RTL (exp), 0));
3213 if (GET_CODE (DECL_RTL (exp)) == MEM
3214 && modifier != EXPAND_CONST_ADDRESS
3215 && modifier != EXPAND_SUM
3216 && modifier != EXPAND_INITIALIZER)
3217 {
3218 /* DECL_RTL probably contains a constant address.
3219 On RISC machines where a constant address isn't valid,
3220 make some insns to get that address into a register. */
3221 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3222 || (flag_force_addr
3223 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3224 return change_address (DECL_RTL (exp), VOIDmode,
3225 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3226 }
3227 return DECL_RTL (exp);
3228
3229 case INTEGER_CST:
3230 return immed_double_const (TREE_INT_CST_LOW (exp),
3231 TREE_INT_CST_HIGH (exp),
3232 mode);
3233
3234 case CONST_DECL:
3235 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3236
3237 case REAL_CST:
3238 /* If optimized, generate immediate CONST_DOUBLE
3239 which will be turned into memory by reload if necessary.
3240
3241 We used to force a register so that loop.c could see it. But
3242 this does not allow gen_* patterns to perform optimizations with
3243 the constants. It also produces two insns in cases like "x = 1.0;".
3244 On most machines, floating-point constants are not permitted in
3245 many insns, so we'd end up copying it to a register in any case.
3246
3247 Now, we do the copying in expand_binop, if appropriate. */
3248 return immed_real_const (exp);
3249
3250 case COMPLEX_CST:
3251 case STRING_CST:
3252 if (! TREE_CST_RTL (exp))
3253 output_constant_def (exp);
3254
3255 /* TREE_CST_RTL probably contains a constant address.
3256 On RISC machines where a constant address isn't valid,
3257 make some insns to get that address into a register. */
3258 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3259 && modifier != EXPAND_CONST_ADDRESS
3260 && modifier != EXPAND_INITIALIZER
3261 && modifier != EXPAND_SUM
3262 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3263 return change_address (TREE_CST_RTL (exp), VOIDmode,
3264 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3265 return TREE_CST_RTL (exp);
3266
3267 case SAVE_EXPR:
3268 context = decl_function_context (exp);
3269 /* We treat inline_function_decl as an alias for the current function
3270 because that is the inline function whose vars, types, etc.
3271 are being merged into the current function.
3272 See expand_inline_function. */
3273 if (context == current_function_decl || context == inline_function_decl)
3274 context = 0;
3275
3276 /* If this is non-local, handle it. */
3277 if (context)
3278 {
3279 temp = SAVE_EXPR_RTL (exp);
3280 if (temp && GET_CODE (temp) == REG)
3281 {
3282 put_var_into_stack (exp);
3283 temp = SAVE_EXPR_RTL (exp);
3284 }
3285 if (temp == 0 || GET_CODE (temp) != MEM)
3286 abort ();
3287 return change_address (temp, mode,
3288 fix_lexical_addr (XEXP (temp, 0), exp));
3289 }
3290 if (SAVE_EXPR_RTL (exp) == 0)
3291 {
3292 if (mode == BLKmode)
3293 temp
3294 = assign_stack_temp (mode,
3295 int_size_in_bytes (TREE_TYPE (exp)), 0);
3296 else
3297 temp = gen_reg_rtx (mode);
3298 SAVE_EXPR_RTL (exp) = temp;
3299 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3300 if (!optimize && GET_CODE (temp) == REG)
3301 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3302 save_expr_regs);
3303 }
3304 return SAVE_EXPR_RTL (exp);
3305
3306 case EXIT_EXPR:
3307 /* Exit the current loop if the body-expression is true. */
3308 {
3309 rtx label = gen_label_rtx ();
3310 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3311 expand_exit_loop (NULL_PTR);
3312 emit_label (label);
3313 }
3314 return const0_rtx;
3315
3316 case LOOP_EXPR:
3317 expand_start_loop (1);
3318 expand_expr_stmt (TREE_OPERAND (exp, 0));
3319 expand_end_loop ();
3320
3321 return const0_rtx;
3322
3323 case BIND_EXPR:
3324 {
3325 tree vars = TREE_OPERAND (exp, 0);
3326 int vars_need_expansion = 0;
3327
3328 /* Need to open a binding contour here because
3329 if there are any cleanups they must be contained here. */
3330 expand_start_bindings (0);
3331
3332 /* Mark the corresponding BLOCK for output. */
3333 if (TREE_OPERAND (exp, 2) != 0)
3334 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3335
3336 /* If VARS have not yet been expanded, expand them now. */
3337 while (vars)
3338 {
3339 if (DECL_RTL (vars) == 0)
3340 {
3341 vars_need_expansion = 1;
3342 expand_decl (vars);
3343 }
3344 expand_decl_init (vars);
3345 vars = TREE_CHAIN (vars);
3346 }
3347
3348 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3349
3350 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3351
3352 return temp;
3353 }
3354
3355 case RTL_EXPR:
3356 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3357 abort ();
3358 emit_insns (RTL_EXPR_SEQUENCE (exp));
3359 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3360 return RTL_EXPR_RTL (exp);
3361
3362 case CONSTRUCTOR:
3363 /* All elts simple constants => refer to a constant in memory. But
3364 if this is a non-BLKmode mode, let it store a field at a time
3365 since that should make a CONST_INT or CONST_DOUBLE when we
3366 fold. */
3367 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3368 {
3369 rtx constructor = output_constant_def (exp);
3370 if (modifier != EXPAND_CONST_ADDRESS
3371 && modifier != EXPAND_INITIALIZER
3372 && modifier != EXPAND_SUM
3373 && !memory_address_p (GET_MODE (constructor),
3374 XEXP (constructor, 0)))
3375 constructor = change_address (constructor, VOIDmode,
3376 XEXP (constructor, 0));
3377 return constructor;
3378 }
3379
3380 if (ignore)
3381 {
3382 tree elt;
3383 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3384 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3385 return const0_rtx;
3386 }
3387 else
3388 {
3389 if (target == 0 || ! safe_from_p (target, exp))
3390 {
3391 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3392 target = gen_reg_rtx (mode);
3393 else
3394 {
3395 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3396 if (target)
3397 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3398 target = safe_target;
3399 }
3400 }
3401 store_constructor (exp, target);
3402 return target;
3403 }
3404
3405 case INDIRECT_REF:
3406 {
3407 tree exp1 = TREE_OPERAND (exp, 0);
3408 tree exp2;
3409
3410 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3411 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3412 This code has the same general effect as simply doing
3413 expand_expr on the save expr, except that the expression PTR
3414 is computed for use as a memory address. This means different
3415 code, suitable for indexing, may be generated. */
3416 if (TREE_CODE (exp1) == SAVE_EXPR
3417 && SAVE_EXPR_RTL (exp1) == 0
3418 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3419 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3420 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3421 {
3422 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3423 VOIDmode, EXPAND_SUM);
3424 op0 = memory_address (mode, temp);
3425 op0 = copy_all_regs (op0);
3426 SAVE_EXPR_RTL (exp1) = op0;
3427 }
3428 else
3429 {
3430 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3431 op0 = memory_address (mode, op0);
3432 }
3433
3434 temp = gen_rtx (MEM, mode, op0);
3435 /* If address was computed by addition,
3436 mark this as an element of an aggregate. */
3437 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3438 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3439 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3440 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3441 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3442 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3443 || (TREE_CODE (exp1) == ADDR_EXPR
3444 && (exp2 = TREE_OPERAND (exp1, 0))
3445 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3446 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3447 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3448 MEM_IN_STRUCT_P (temp) = 1;
3449 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3450 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3451 a location is accessed through a pointer to const does not mean
3452 that the value there can never change. */
3453 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3454 #endif
3455 return temp;
3456 }
3457
3458 case ARRAY_REF:
3459 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3460 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3461 {
3462 /* Nonconstant array index or nonconstant element size.
3463 Generate the tree for *(&array+index) and expand that,
3464 except do it in a language-independent way
3465 and don't complain about non-lvalue arrays.
3466 `mark_addressable' should already have been called
3467 for any array for which this case will be reached. */
3468
3469 /* Don't forget the const or volatile flag from the array element. */
3470 tree variant_type = build_type_variant (type,
3471 TREE_READONLY (exp),
3472 TREE_THIS_VOLATILE (exp));
3473 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3474 TREE_OPERAND (exp, 0));
3475 tree index = TREE_OPERAND (exp, 1);
3476 tree elt;
3477
3478 /* Convert the integer argument to a type the same size as a pointer
3479 so the multiply won't overflow spuriously. */
3480 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3481 index = convert (type_for_size (POINTER_SIZE, 0), index);
3482
3483 /* Don't think the address has side effects
3484 just because the array does.
3485 (In some cases the address might have side effects,
3486 and we fail to record that fact here. However, it should not
3487 matter, since expand_expr should not care.) */
3488 TREE_SIDE_EFFECTS (array_adr) = 0;
3489
3490 elt = build1 (INDIRECT_REF, type,
3491 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3492 array_adr,
3493 fold (build (MULT_EXPR,
3494 TYPE_POINTER_TO (variant_type),
3495 index, size_in_bytes (type))))));
3496
3497 /* Volatility, etc., of new expression is same as old expression. */
3498 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3499 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3500 TREE_READONLY (elt) = TREE_READONLY (exp);
3501
3502 return expand_expr (elt, target, tmode, modifier);
3503 }
3504
3505 /* Fold an expression like: "foo"[2].
3506 This is not done in fold so it won't happen inside &. */
3507 {
3508 int i;
3509 tree arg0 = TREE_OPERAND (exp, 0);
3510 tree arg1 = TREE_OPERAND (exp, 1);
3511
3512 if (TREE_CODE (arg0) == STRING_CST
3513 && TREE_CODE (arg1) == INTEGER_CST
3514 && !TREE_INT_CST_HIGH (arg1)
3515 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3516 {
3517 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3518 {
3519 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3520 TREE_TYPE (exp) = integer_type_node;
3521 return expand_expr (exp, target, tmode, modifier);
3522 }
3523 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3524 {
3525 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3526 TREE_TYPE (exp) = integer_type_node;
3527 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3528 }
3529 }
3530 }
3531
3532 /* If this is a constant index into a constant array,
3533 just get the value from the array. Handle both the cases when
3534 we have an explicit constructor and when our operand is a variable
3535 that was declared const. */
3536
3537 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3538 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3539 {
3540 tree index = fold (TREE_OPERAND (exp, 1));
3541 if (TREE_CODE (index) == INTEGER_CST
3542 && TREE_INT_CST_HIGH (index) == 0)
3543 {
3544 int i = TREE_INT_CST_LOW (index);
3545 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3546
3547 while (elem && i--)
3548 elem = TREE_CHAIN (elem);
3549 if (elem)
3550 return expand_expr (fold (TREE_VALUE (elem)), target,
3551 tmode, modifier);
3552 }
3553 }
3554
3555 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3556 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3557 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3558 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3559 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3560 && optimize >= 1
3561 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3562 != ERROR_MARK))
3563 {
3564 tree index = fold (TREE_OPERAND (exp, 1));
3565 if (TREE_CODE (index) == INTEGER_CST
3566 && TREE_INT_CST_HIGH (index) == 0)
3567 {
3568 int i = TREE_INT_CST_LOW (index);
3569 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3570
3571 if (TREE_CODE (init) == CONSTRUCTOR)
3572 {
3573 tree elem = CONSTRUCTOR_ELTS (init);
3574
3575 while (elem && i--)
3576 elem = TREE_CHAIN (elem);
3577 if (elem)
3578 return expand_expr (fold (TREE_VALUE (elem)), target,
3579 tmode, modifier);
3580 }
3581 else if (TREE_CODE (init) == STRING_CST
3582 && i < TREE_STRING_LENGTH (init))
3583 {
3584 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3585 return convert_to_mode (mode, temp, 0);
3586 }
3587 }
3588 }
3589 /* Treat array-ref with constant index as a component-ref. */
3590
3591 case COMPONENT_REF:
3592 case BIT_FIELD_REF:
3593 /* If the operand is a CONSTRUCTOR, we can just extract the
3594 appropriate field if it is present. */
3595 if (code != ARRAY_REF
3596 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3597 {
3598 tree elt;
3599
3600 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3601 elt = TREE_CHAIN (elt))
3602 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3603 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3604 }
3605
3606 {
3607 enum machine_mode mode1;
3608 int bitsize;
3609 int bitpos;
3610 tree offset;
3611 int volatilep = 0;
3612 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3613 &mode1, &unsignedp, &volatilep);
3614
3615 /* In some cases, we will be offsetting OP0's address by a constant.
3616 So get it as a sum, if possible. If we will be using it
3617 directly in an insn, we validate it. */
3618 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3619
3620 /* If this is a constant, put it into a register if it is a
3621 legitimate constant, and into memory if it isn't. */
3622 if (CONSTANT_P (op0))
3623 {
3624 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3625 if (LEGITIMATE_CONSTANT_P (op0))
3626 op0 = force_reg (mode, op0);
3627 else
3628 op0 = validize_mem (force_const_mem (mode, op0));
3629 }
3630
3631 if (offset != 0)
3632 {
3633 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3634
3635 if (GET_CODE (op0) != MEM)
3636 abort ();
3637 op0 = change_address (op0, VOIDmode,
3638 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3639 force_reg (Pmode, offset_rtx)));
3640 }
3641
3642 /* Don't forget about volatility even if this is a bitfield. */
3643 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3644 {
3645 op0 = copy_rtx (op0);
3646 MEM_VOLATILE_P (op0) = 1;
3647 }
3648
3649 if (mode1 == VOIDmode
3650 || (mode1 != BLKmode && ! direct_load[(int) mode1])
3651 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3652 {
3653 /* In cases where an aligned union has an unaligned object
3654 as a field, we might be extracting a BLKmode value from
3655 an integer-mode (e.g., SImode) object. Handle this case
3656 by doing the extract into an object as wide as the field
3657 (which we know to be the width of a basic mode), then
3658 storing into memory, and changing the mode to BLKmode. */
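/* Sketch of the scheme: a 24-bit BLKmode field carved out of an
   SImode word is extracted into an SImode register, spilled to a
   stack temporary, and the temporary's mode is then changed to
   BLKmode so callers see the memory reference they expect.  */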
3659 enum machine_mode ext_mode = mode;
3660
3661 if (ext_mode == BLKmode)
3662 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3663
3664 if (ext_mode == BLKmode)
3665 abort ();
3666
3667 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3668 unsignedp, target, ext_mode, ext_mode,
3669 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3670 int_size_in_bytes (TREE_TYPE (tem)));
3671 if (mode == BLKmode)
3672 {
3673 rtx new = assign_stack_temp (ext_mode,
3674 bitsize / BITS_PER_UNIT, 0);
3675
3676 emit_move_insn (new, op0);
3677 op0 = copy_rtx (new);
3678 PUT_MODE (op0, BLKmode);
3679 }
3680
3681 return op0;
3682 }
3683
3684 /* Get a reference to just this component. */
3685 if (modifier == EXPAND_CONST_ADDRESS
3686 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3687 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3688 (bitpos / BITS_PER_UNIT)));
3689 else
3690 op0 = change_address (op0, mode1,
3691 plus_constant (XEXP (op0, 0),
3692 (bitpos / BITS_PER_UNIT)));
3693 MEM_IN_STRUCT_P (op0) = 1;
3694 MEM_VOLATILE_P (op0) |= volatilep;
3695 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3696 return op0;
3697 if (target == 0)
3698 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3699 convert_move (target, op0, unsignedp);
3700 return target;
3701 }
3702
3703 case OFFSET_REF:
3704 {
3705 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3706 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3707 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3708 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3709 MEM_IN_STRUCT_P (temp) = 1;
3710 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3711 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3712 a location is accessed through a pointer to const does not mean
3713 that the value there can never change. */
3714 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3715 #endif
3716 return temp;
3717 }
3718
3719 /* Intended for a reference to a buffer of a file-object in Pascal.
3720 But it's not certain that a special tree code will really be
3721 necessary for these. INDIRECT_REF might work for them. */
3722 case BUFFER_REF:
3723 abort ();
3724
3725 case WITH_CLEANUP_EXPR:
3726 if (RTL_EXPR_RTL (exp) == 0)
3727 {
3728 RTL_EXPR_RTL (exp)
3729 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3730 cleanups_this_call
3731 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3732 /* That's it for this cleanup. */
3733 TREE_OPERAND (exp, 2) = 0;
3734 }
3735 return RTL_EXPR_RTL (exp);
3736
3737 case CALL_EXPR:
3738 /* Check for a built-in function. */
3739 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3740 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3741 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3742 return expand_builtin (exp, target, subtarget, tmode, ignore);
3743 /* If this call was expanded already by preexpand_calls,
3744 just return the result we got. */
3745 if (CALL_EXPR_RTL (exp) != 0)
3746 return CALL_EXPR_RTL (exp);
3747 return expand_call (exp, target, ignore);
3748
3749 case NON_LVALUE_EXPR:
3750 case NOP_EXPR:
3751 case CONVERT_EXPR:
3752 case REFERENCE_EXPR:
3753 if (TREE_CODE (type) == VOID_TYPE || ignore)
3754 {
3755 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3756 return const0_rtx;
3757 }
3758 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3759 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3760 if (TREE_CODE (type) == UNION_TYPE)
3761 {
3762 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3763 if (target == 0)
3764 {
3765 if (mode == BLKmode)
3766 {
3767 if (TYPE_SIZE (type) == 0
3768 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3769 abort ();
3770 target = assign_stack_temp (BLKmode,
3771 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3772 + BITS_PER_UNIT - 1)
3773 / BITS_PER_UNIT, 0);
3774 }
3775 else
3776 target = gen_reg_rtx (mode);
3777 }
3778 if (GET_CODE (target) == MEM)
3779 /* Store data into beginning of memory target. */
3780 store_expr (TREE_OPERAND (exp, 0),
3781 change_address (target, TYPE_MODE (valtype), 0),
3782 NULL_RTX);
3783 else if (GET_CODE (target) == REG)
3784 /* Store this field into a union of the proper type. */
3785 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3786 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3787 VOIDmode, 0, 1,
3788 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3789 else
3790 abort ();
3791
3792 /* Return the entire union. */
3793 return target;
3794 }
3795 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3796 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3797 return op0;
3798 if (modifier == EXPAND_INITIALIZER)
3799 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3800 if (flag_force_mem && GET_CODE (op0) == MEM)
3801 op0 = copy_to_reg (op0);
3802
3803 if (target == 0)
3804 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3805 else
3806 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3807 return target;
3808
3809 case PLUS_EXPR:
3810 /* We come here from MINUS_EXPR when the second operand is a constant. */
3811 plus_expr:
3812 this_optab = add_optab;
3813
3814 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3815 something else, make sure we add the register to the constant and
3816 then to the other thing. This case can occur during strength
3817 reduction and doing it this way will produce better code if the
3818 frame pointer or argument pointer is eliminated.
3819
3820 fold-const.c will ensure that the constant is always in the inner
3821 PLUS_EXPR, so the only case we need to do anything about is if
3822 sp, ap, or fp is our second argument, in which case we must swap
3823 the innermost first argument and our second argument. */
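/* That is, a tree of the form (x + c) + fp is rearranged here
   into (fp + c) + x, so the constant stays adjacent to the
   register that elimination will later replace.  */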
3824
3825 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3826 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3827 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3828 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3829 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3830 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3831 {
3832 tree t = TREE_OPERAND (exp, 1);
3833
3834 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3835 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3836 }
3837
3838 /* If the result is to be Pmode and we are adding an integer to
3839 something, we might be forming a constant. So try to use
3840 plus_constant. If it produces a sum and we can't accept it,
3841 use force_operand. This allows P = &ARR[const] to generate
3842 efficient code on machines where a SYMBOL_REF is not a valid
3843 address.
3844
3845 If this is an EXPAND_SUM call, always return the sum. */
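/* For example, expanding 16 + ARR (from P = &ARR[4] with 4-byte
   elements) can yield (const (plus (symbol_ref ARR) (const_int 16)));
   force_operand then copies the sum through a register only when
   such a CONST is not usable directly.  */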
3846 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3847 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3848 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3849 || mode == Pmode))
3850 {
3851 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3852 EXPAND_SUM);
3853 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3854 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3855 op1 = force_operand (op1, target);
3856 return op1;
3857 }
3858
3859 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3860 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3861 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3862 || mode == Pmode))
3863 {
3864 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3865 EXPAND_SUM);
3866 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3867 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3868 op0 = force_operand (op0, target);
3869 return op0;
3870 }
3871
3872 /* No sense saving up arithmetic to be done
3873 if it's all in the wrong mode to form part of an address.
3874 And force_operand won't know whether to sign-extend or
3875 zero-extend. */
3876 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3877 || mode != Pmode) goto binop;
3878
3879 preexpand_calls (exp);
3880 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3881 subtarget = 0;
3882
3883 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3884 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3885
3886 /* Make sure any term that's a sum with a constant comes last. */
3887 if (GET_CODE (op0) == PLUS
3888 && CONSTANT_P (XEXP (op0, 1)))
3889 {
3890 temp = op0;
3891 op0 = op1;
3892 op1 = temp;
3893 }
3894 /* If adding to a sum including a constant,
3895 associate it to put the constant outside. */
3896 if (GET_CODE (op1) == PLUS
3897 && CONSTANT_P (XEXP (op1, 1)))
3898 {
3899 rtx constant_term = const0_rtx;
3900
3901 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3902 if (temp != 0)
3903 op0 = temp;
3904 /* Ensure that MULT comes first if there is one. */
3905 else if (GET_CODE (op0) == MULT)
3906 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3907 else
3908 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3909
3910 /* Let's also eliminate constants from op0 if possible. */
3911 op0 = eliminate_constant_term (op0, &constant_term);
3912
3913 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3914 their sum should be a constant. Form it into OP1, since the
3915 result we want will then be OP0 + OP1. */
3916
3917 temp = simplify_binary_operation (PLUS, mode, constant_term,
3918 XEXP (op1, 1));
3919 if (temp != 0)
3920 op1 = temp;
3921 else
3922 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3923 }
3924
3925 /* Put a constant term last and put a multiplication first. */
3926 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3927 temp = op1, op1 = op0, op0 = temp;
3928
3929 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3930 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3931
3932 case MINUS_EXPR:
3933 /* Handle difference of two symbolic constants,
3934 for the sake of an initializer. */
3935 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3936 && really_constant_p (TREE_OPERAND (exp, 0))
3937 && really_constant_p (TREE_OPERAND (exp, 1)))
3938 {
3939 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3940 VOIDmode, modifier);
3941 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3942 VOIDmode, modifier);
3943 return gen_rtx (MINUS, mode, op0, op1);
3944 }
3945 /* Convert A - const to A + (-const). */
3946 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3947 {
3948 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3949 fold (build1 (NEGATE_EXPR, type,
3950 TREE_OPERAND (exp, 1))));
3951 goto plus_expr;
3952 }
3953 this_optab = sub_optab;
3954 goto binop;
3955
3956 case MULT_EXPR:
3957 preexpand_calls (exp);
3958 /* If first operand is constant, swap them.
3959 Thus the following special case checks need only
3960 check the second operand. */
3961 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3962 {
3963 register tree t1 = TREE_OPERAND (exp, 0);
3964 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3965 TREE_OPERAND (exp, 1) = t1;
3966 }
3967
3968 /* Attempt to return something suitable for generating an
3969 indexed address, for machines that support that. */
3970
3971 if (modifier == EXPAND_SUM && mode == Pmode
3972 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3973 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3974 {
3975 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3976
3977 /* Apply distributive law if OP0 is x+c. */
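/* That is, (x + 4) * 8 is returned as (plus (mult x 8)
   (const_int 32)), keeping the constant term exposed so it can
   be folded into an address.  */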
3978 if (GET_CODE (op0) == PLUS
3979 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3980 return gen_rtx (PLUS, mode,
3981 gen_rtx (MULT, mode, XEXP (op0, 0),
3982 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3983 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3984 * INTVAL (XEXP (op0, 1))));
3985
3986 if (GET_CODE (op0) != REG)
3987 op0 = force_operand (op0, NULL_RTX);
3988 if (GET_CODE (op0) != REG)
3989 op0 = copy_to_mode_reg (mode, op0);
3990
3991 return gen_rtx (MULT, mode, op0,
3992 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3993 }
3994
3995 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3996 subtarget = 0;
3997
3998 /* Check for multiplying things that have been extended
3999 from a narrower type. If this machine supports multiplying
4000 in that narrower type with a result in the desired type,
4001 do it that way, and avoid the explicit type-conversion. */
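/* For example, if a and b are shorts, the C expression
   (int) a * (int) b can use a single widening multiply (e.g.
   HImode to SImode, via umul_widen_optab or smul_widen_optab
   below) instead of two extensions followed by a full SImode
   multiply, on machines that provide such an instruction.  */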
4002 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4003 && TREE_CODE (type) == INTEGER_TYPE
4004 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4005 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4006 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4007 && int_fits_type_p (TREE_OPERAND (exp, 1),
4008 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4009 /* Don't use a widening multiply if a shift will do. */
4010 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4011 > HOST_BITS_PER_WIDE_INT)
4012 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4013 ||
4014 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4015 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4016 ==
4017 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4018 /* If both operands are extended, they must either both
4019 be zero-extended or both be sign-extended. */
4020 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4021 ==
4022 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4023 {
4024 enum machine_mode innermode
4025 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4026 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4027 ? umul_widen_optab : smul_widen_optab);
4028 if (mode == GET_MODE_WIDER_MODE (innermode)
4029 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4030 {
4031 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4032 NULL_RTX, VOIDmode, 0);
4033 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4034 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4035 VOIDmode, 0);
4036 else
4037 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4038 NULL_RTX, VOIDmode, 0);
4039 goto binop2;
4040 }
4041 }
4042 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4043 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4044 return expand_mult (mode, op0, op1, target, unsignedp);
4045
4046 case TRUNC_DIV_EXPR:
4047 case FLOOR_DIV_EXPR:
4048 case CEIL_DIV_EXPR:
4049 case ROUND_DIV_EXPR:
4050 case EXACT_DIV_EXPR:
4051 preexpand_calls (exp);
4052 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4053 subtarget = 0;
4054 /* Possible optimization: compute the dividend with EXPAND_SUM
4055 then if the divisor is constant can optimize the case
4056 where some terms of the dividend have coeffs divisible by it. */
4057 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4058 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4059 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4060
4061 case RDIV_EXPR:
4062 this_optab = flodiv_optab;
4063 goto binop;
4064
4065 case TRUNC_MOD_EXPR:
4066 case FLOOR_MOD_EXPR:
4067 case CEIL_MOD_EXPR:
4068 case ROUND_MOD_EXPR:
4069 preexpand_calls (exp);
4070 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4071 subtarget = 0;
4072 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4073 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4074 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4075
4076 case FIX_ROUND_EXPR:
4077 case FIX_FLOOR_EXPR:
4078 case FIX_CEIL_EXPR:
4079 abort (); /* Not used for C. */
4080
4081 case FIX_TRUNC_EXPR:
4082 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4083 if (target == 0)
4084 target = gen_reg_rtx (mode);
4085 expand_fix (target, op0, unsignedp);
4086 return target;
4087
4088 case FLOAT_EXPR:
4089 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4090 if (target == 0)
4091 target = gen_reg_rtx (mode);
4092 /* expand_float can't figure out what to do if FROM has VOIDmode.
4093 So give it the correct mode. With -O, cse will optimize this. */
4094 if (GET_MODE (op0) == VOIDmode)
4095 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4096 op0);
4097 expand_float (target, op0,
4098 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4099 return target;
4100
4101 case NEGATE_EXPR:
4102 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4103 temp = expand_unop (mode, neg_optab, op0, target, 0);
4104 if (temp == 0)
4105 abort ();
4106 return temp;
4107
4108 case ABS_EXPR:
4109 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4110
4111 /* Unsigned abs is simply the operand. Testing here means we don't
4112 risk generating incorrect code below. */
4113 if (TREE_UNSIGNED (type))
4114 return op0;
4115
4116 /* First try to do it with a special abs instruction. */
4117 temp = expand_unop (mode, abs_optab, op0, target, 0);
4118 if (temp != 0)
4119 return temp;
4120
4121 /* If this machine has expensive jumps, we can do integer absolute
4122 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4123 where W is the width of MODE. */
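/* Worked example with W = 32 and x = -5: the arithmetic shift
   gives t = x >> 31 = -1 (all one bits), so (x ^ t) - t
   = ~x + 1 = 5; when x >= 0, t is 0 and the result is just x.  */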
4124
4125 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4126 {
4127 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4128 size_int (GET_MODE_BITSIZE (mode) - 1),
4129 NULL_RTX, 0);
4130
4131 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4132 OPTAB_LIB_WIDEN);
4133 if (temp != 0)
4134 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4135 OPTAB_LIB_WIDEN);
4136
4137 if (temp != 0)
4138 return temp;
4139 }
4140
4141 /* If that does not win, use conditional jump and negate. */
4142 target = original_target;
4143 temp = gen_label_rtx ();
4144 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4145 || (GET_CODE (target) == REG
4146 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4147 target = gen_reg_rtx (mode);
4148 emit_move_insn (target, op0);
4149 emit_cmp_insn (target,
4150 expand_expr (convert (type, integer_zero_node),
4151 NULL_RTX, VOIDmode, 0),
4152 GE, NULL_RTX, mode, 0, 0);
4153 NO_DEFER_POP;
4154 emit_jump_insn (gen_bge (temp));
4155 op0 = expand_unop (mode, neg_optab, target, target, 0);
4156 if (op0 != target)
4157 emit_move_insn (target, op0);
4158 emit_label (temp);
4159 OK_DEFER_POP;
4160 return target;
4161
4162 case MAX_EXPR:
4163 case MIN_EXPR:
4164 target = original_target;
4165 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4166 || (GET_CODE (target) == REG
4167 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4168 target = gen_reg_rtx (mode);
4169 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4170 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4171
4172 /* First try to do it with a special MIN or MAX instruction.
4173 If that does not win, use a conditional jump to select the proper
4174 value. */
4175 this_optab = (TREE_UNSIGNED (type)
4176 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4177 : (code == MIN_EXPR ? smin_optab : smax_optab));
4178
4179 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4180 OPTAB_WIDEN);
4181 if (temp != 0)
4182 return temp;
4183
4184 if (target != op0)
4185 emit_move_insn (target, op0);
4186 op0 = gen_label_rtx ();
4187 if (code == MAX_EXPR)
4188 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4189 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4190 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4191 else
4192 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4193 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4194 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4195 if (temp == const0_rtx)
4196 emit_move_insn (target, op1);
4197 else if (temp != const_true_rtx)
4198 {
4199 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4200 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4201 else
4202 abort ();
4203 emit_move_insn (target, op1);
4204 }
4205 emit_label (op0);
4206 return target;
4207
4208 /* ??? Can optimize when the operand of this is a bitwise operation,
4209 by using a different bitwise operation. */
4210 case BIT_NOT_EXPR:
4211 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4212 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4213 if (temp == 0)
4214 abort ();
4215 return temp;
4216
4217 case FFS_EXPR:
4218 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4219 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4220 if (temp == 0)
4221 abort ();
4222 return temp;
4223
4224 /* ??? Can optimize bitwise operations with one arg constant.
4225 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4226 and (a bitwise1 b) bitwise2 b (etc)
4227 but that is probably not worthwhile. */
4228
4229 /* BIT_AND_EXPR is for bitwise anding.
4230 TRUTH_AND_EXPR is for anding two boolean values
4231 when we want in all cases to compute both of them.
4232 In general it is fastest to do TRUTH_AND_EXPR by
4233 computing both operands as actual zero-or-1 values
4234 and then bitwise anding. In cases where there cannot
4235 be any side effects, better code would be made by
4236 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4237 but the question is how to recognize those cases. */
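/* E.g. for (a > 0) TRUTH_AND (b > 0), both comparisons are
   materialized as 0-or-1 values and ANDed, whereas the
   TRUTH_ANDIF_EXPR form would branch around evaluating the
   second comparison when the first is false.  */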
4238
4239 case TRUTH_AND_EXPR:
4240 case BIT_AND_EXPR:
4241 this_optab = and_optab;
4242 goto binop;
4243
4244 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4245 case TRUTH_OR_EXPR:
4246 case BIT_IOR_EXPR:
4247 this_optab = ior_optab;
4248 goto binop;
4249
4250 case BIT_XOR_EXPR:
4251 this_optab = xor_optab;
4252 goto binop;
4253
4254 case LSHIFT_EXPR:
4255 case RSHIFT_EXPR:
4256 case LROTATE_EXPR:
4257 case RROTATE_EXPR:
4258 preexpand_calls (exp);
4259 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4260 subtarget = 0;
4261 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4262 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4263 unsignedp);
4264
4265 /* Could determine the answer when only additive constants differ.
4266 Also, the addition of one can be handled by changing the condition. */
4267 case LT_EXPR:
4268 case LE_EXPR:
4269 case GT_EXPR:
4270 case GE_EXPR:
4271 case EQ_EXPR:
4272 case NE_EXPR:
4273 preexpand_calls (exp);
4274 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4275 if (temp != 0)
4276 return temp;
4277 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4278 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4279 && original_target
4280 && GET_CODE (original_target) == REG
4281 && (GET_MODE (original_target)
4282 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4283 {
4284 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4285 if (temp != original_target)
4286 temp = copy_to_reg (temp);
4287 op1 = gen_label_rtx ();
4288 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4289 GET_MODE (temp), unsignedp, 0);
4290 emit_jump_insn (gen_beq (op1));
4291 emit_move_insn (temp, const1_rtx);
4292 emit_label (op1);
4293 return temp;
4294 }
4295 /* If no set-flag instruction, must generate a conditional
4296 store into a temporary variable. Drop through
4297 and handle this like && and ||. */
4298
4299 case TRUTH_ANDIF_EXPR:
4300 case TRUTH_ORIF_EXPR:
4301 if (target == 0 || ! safe_from_p (target, exp)
4302 /* Make sure we don't have a hard reg (such as function's return
4303 value) live across basic blocks, if not optimizing. */
4304 || (!optimize && GET_CODE (target) == REG
4305 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4306 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4307 emit_clr_insn (target);
4308 op1 = gen_label_rtx ();
4309 jumpifnot (exp, op1);
4310 emit_0_to_1_insn (target);
4311 emit_label (op1);
4312 return target;
4313
4314 case TRUTH_NOT_EXPR:
4315 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4316 /* The parser is careful to generate TRUTH_NOT_EXPR
4317 only with operands that are always zero or one. */
4318 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4319 target, 1, OPTAB_LIB_WIDEN);
4320 if (temp == 0)
4321 abort ();
4322 return temp;
4323
4324 case COMPOUND_EXPR:
4325 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4326 emit_queue ();
4327 return expand_expr (TREE_OPERAND (exp, 1),
4328 (ignore ? const0_rtx : target),
4329 VOIDmode, 0);
4330
4331 case COND_EXPR:
4332 {
4333 /* Note that COND_EXPRs whose type is a structure or union
4334 are required to be constructed to contain assignments to
4335 a temporary variable, so that we can evaluate them here
4336 for side effect only. If type is void, we must do likewise. */
4337
4338 /* If an arm of the branch requires a cleanup,
4339 only that cleanup is performed. */
4340
4341 tree singleton = 0;
4342 tree binary_op = 0, unary_op = 0;
4343 tree old_cleanups = cleanups_this_call;
4344 cleanups_this_call = 0;
4345
4346 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4347 convert it to our mode, if necessary. */
4348 if (integer_onep (TREE_OPERAND (exp, 1))
4349 && integer_zerop (TREE_OPERAND (exp, 2))
4350 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4351 {
4352 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4353 if (GET_MODE (op0) == mode)
4354 return op0;
4355 if (target == 0)
4356 target = gen_reg_rtx (mode);
4357 convert_move (target, op0, unsignedp);
4358 return target;
4359 }
4360
4361 /* If we are not to produce a result, we have no target. Otherwise,
4362 if a target was specified use it; it will not be used as an
4363 intermediate target unless it is safe. If no target, use a
4364 temporary. */
4365
4366 if (mode == VOIDmode || ignore)
4367 temp = 0;
4368 else if (original_target
4369 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4370 temp = original_target;
4371 else if (mode == BLKmode)
4372 {
4373 if (TYPE_SIZE (type) == 0
4374 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4375 abort ();
4376 temp = assign_stack_temp (BLKmode,
4377 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4378 + BITS_PER_UNIT - 1)
4379 / BITS_PER_UNIT, 0);
4380 }
4381 else
4382 temp = gen_reg_rtx (mode);
4383
4384 /* Check for X ? A + B : A. If we have this, we can copy
4385 A to the output and conditionally add B. Similarly for unary
4386 operations. Don't do this if X has side-effects because
4387 those side effects might affect A or B and the "?" operation is
4388 a sequence point in ANSI. (We test for side effects later.) */
4389
4390 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4391 && operand_equal_p (TREE_OPERAND (exp, 2),
4392 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4393 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4394 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4395 && operand_equal_p (TREE_OPERAND (exp, 1),
4396 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4397 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4398 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4399 && operand_equal_p (TREE_OPERAND (exp, 2),
4400 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4401 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4402 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4403 && operand_equal_p (TREE_OPERAND (exp, 1),
4404 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4405 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4406
4407 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4408 operation, do this as A + (X != 0). Similarly for other simple
4409 binary operators. */
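/* E.g. (i > 0 ? n + 1 : n) can be compiled as n + (i > 0),
   with the comparison materialized as 0 or 1 by a store-flag
   instruction, so no branch is needed.  */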
4410 if (singleton && binary_op
4411 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4412 && (TREE_CODE (binary_op) == PLUS_EXPR
4413 || TREE_CODE (binary_op) == MINUS_EXPR
4414 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4415 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4416 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4417 && integer_onep (TREE_OPERAND (binary_op, 1))
4418 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4419 {
4420 rtx result;
4421 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4422 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4423 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4424 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4425 : and_optab);
4426
4427 /* If we had X ? A : A + 1, do this as A + (X == 0).
4428
4429 We have to invert the truth value here and then put it
4430 back later if do_store_flag fails. We cannot simply copy
4431 TREE_OPERAND (exp, 0) to another variable and modify that
4432 because invert_truthvalue can modify the tree pointed to
4433 by its argument. */
4434 if (singleton == TREE_OPERAND (exp, 1))
4435 TREE_OPERAND (exp, 0)
4436 = invert_truthvalue (TREE_OPERAND (exp, 0));
4437
4438 result = do_store_flag (TREE_OPERAND (exp, 0),
4439 (safe_from_p (temp, singleton)
4440 ? temp : NULL_RTX),
4441 mode, BRANCH_COST <= 1);
4442
4443 if (result)
4444 {
4445 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4446 return expand_binop (mode, boptab, op1, result, temp,
4447 unsignedp, OPTAB_LIB_WIDEN);
4448 }
4449 else if (singleton == TREE_OPERAND (exp, 1))
4450 TREE_OPERAND (exp, 0)
4451 = invert_truthvalue (TREE_OPERAND (exp, 0));
4452 }
4453
4454 NO_DEFER_POP;
4455 op0 = gen_label_rtx ();
4456
4457 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4458 {
4459 if (temp != 0)
4460 {
4461 /* If the target conflicts with the other operand of the
4462 binary op, we can't use it. Also, we can't use the target
4463 if it is a hard register, because evaluating the condition
4464 might clobber it. */
4465 if ((binary_op
4466 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4467 || (GET_CODE (temp) == REG
4468 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4469 temp = gen_reg_rtx (mode);
4470 store_expr (singleton, temp, 0);
4471 }
4472 else
4473 expand_expr (singleton,
4474 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4475 if (cleanups_this_call)
4476 {
4477 sorry ("aggregate value in COND_EXPR");
4478 cleanups_this_call = 0;
4479 }
4480 if (singleton == TREE_OPERAND (exp, 1))
4481 jumpif (TREE_OPERAND (exp, 0), op0);
4482 else
4483 jumpifnot (TREE_OPERAND (exp, 0), op0);
4484
4485 if (binary_op && temp == 0)
4486 /* Just touch the other operand. */
4487 expand_expr (TREE_OPERAND (binary_op, 1),
4488 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4489 else if (binary_op)
4490 store_expr (build (TREE_CODE (binary_op), type,
4491 make_tree (type, temp),
4492 TREE_OPERAND (binary_op, 1)),
4493 temp, 0);
4494 else
4495 store_expr (build1 (TREE_CODE (unary_op), type,
4496 make_tree (type, temp)),
4497 temp, 0);
4498 op1 = op0;
4499 }
4500 #if 0
4501 /* This is now done in jump.c and is better done there because it
4502 produces shorter register lifetimes. */
4503
4504 /* Check for both possibilities either constants or variables
4505 in registers (but not the same as the target!). If so, can
4506 save branches by assigning one, branching, and assigning the
4507 other. */
4508 else if (temp && GET_MODE (temp) != BLKmode
4509 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4510 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4511 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4512 && DECL_RTL (TREE_OPERAND (exp, 1))
4513 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4514 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4515 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4516 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4517 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4518 && DECL_RTL (TREE_OPERAND (exp, 2))
4519 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4520 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4521 {
4522 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4523 temp = gen_reg_rtx (mode);
4524 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4525 jumpifnot (TREE_OPERAND (exp, 0), op0);
4526 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4527 op1 = op0;
4528 }
4529 #endif
4530 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4531 comparison operator. If we have one of these cases, set the
4532 output to A, branch on A (cse will merge these two references),
4533 then set the output to FOO. */
4534 else if (temp
4535 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4536 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4537 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4538 TREE_OPERAND (exp, 1), 0)
4539 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4540 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4541 {
4542 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4543 temp = gen_reg_rtx (mode);
4544 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4545 jumpif (TREE_OPERAND (exp, 0), op0);
4546 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4547 op1 = op0;
4548 }
4549 else if (temp
4550 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4551 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4552 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4553 TREE_OPERAND (exp, 2), 0)
4554 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4555 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4556 {
4557 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4558 temp = gen_reg_rtx (mode);
4559 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4560 jumpifnot (TREE_OPERAND (exp, 0), op0);
4561 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4562 op1 = op0;
4563 }
4564 else
4565 {
4566 op1 = gen_label_rtx ();
4567 jumpifnot (TREE_OPERAND (exp, 0), op0);
4568 if (temp != 0)
4569 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4570 else
4571 expand_expr (TREE_OPERAND (exp, 1),
4572 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4573 if (cleanups_this_call)
4574 {
4575 sorry ("aggregate value in COND_EXPR");
4576 cleanups_this_call = 0;
4577 }
4578
4579 emit_queue ();
4580 emit_jump_insn (gen_jump (op1));
4581 emit_barrier ();
4582 emit_label (op0);
4583 if (temp != 0)
4584 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4585 else
4586 expand_expr (TREE_OPERAND (exp, 2),
4587 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4588 }
4589
4590 if (cleanups_this_call)
4591 {
4592 sorry ("aggregate value in COND_EXPR");
4593 cleanups_this_call = 0;
4594 }
4595
4596 emit_queue ();
4597 emit_label (op1);
4598 OK_DEFER_POP;
4599 cleanups_this_call = old_cleanups;
4600 return temp;
4601 }
4602
4603 case TARGET_EXPR:
4604 {
4605 /* Something needs to be initialized, but we didn't know
4606 where that thing was when building the tree. For example,
4607 it could be the return value of a function, or a parameter
4608 to a function which is laid down in the stack, or a temporary
4609 variable which must be passed by reference.
4610
4611 We guarantee that the expression will either be constructed
4612 or copied into our original target. */
4613
4614 tree slot = TREE_OPERAND (exp, 0);
4615
4616 if (TREE_CODE (slot) != VAR_DECL)
4617 abort ();
4618
4619 if (target == 0)
4620 {
4621 if (DECL_RTL (slot) != 0)
4622 target = DECL_RTL (slot);
4623 else
4624 {
4625 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4626 /* All temp slots at this level must not conflict. */
4627 preserve_temp_slots (target);
4628 DECL_RTL (slot) = target;
4629 }
4630
4631 #if 0
4632 /* Since SLOT is not known to the called function
4633 to belong to its stack frame, we must build an explicit
4634 cleanup. This case occurs when we must build up a reference
4635 to pass the reference as an argument. In this case,
4636 it is very likely that such a reference need not be
4637 built here. */
4638
4639 if (TREE_OPERAND (exp, 2) == 0)
4640 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4641 if (TREE_OPERAND (exp, 2))
4642 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4643 cleanups_this_call);
4644 #endif
4645 }
4646 else
4647 {
4648 /* This case does occur when expanding a parameter which
4649 needs to be constructed on the stack. The target
4650 is the actual stack address that we want to initialize.
4651 The function we call will perform the cleanup in this case. */
4652
4653 DECL_RTL (slot) = target;
4654 }
4655
4656 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4657 }
4658
4659 case INIT_EXPR:
4660 {
4661 tree lhs = TREE_OPERAND (exp, 0);
4662 tree rhs = TREE_OPERAND (exp, 1);
4663 tree noncopied_parts = 0;
4664 tree lhs_type = TREE_TYPE (lhs);
4665
4666 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4667 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4668 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4669 TYPE_NONCOPIED_PARTS (lhs_type));
4670 while (noncopied_parts != 0)
4671 {
4672 expand_assignment (TREE_VALUE (noncopied_parts),
4673 TREE_PURPOSE (noncopied_parts), 0, 0);
4674 noncopied_parts = TREE_CHAIN (noncopied_parts);
4675 }
4676 return temp;
4677 }
4678
4679 case MODIFY_EXPR:
4680 {
4681 /* If lhs is complex, expand calls in rhs before computing it.
4682 That's so we don't compute a pointer and save it over a call.
4683 If lhs is simple, compute it first so we can give it as a
4684 target if the rhs is just a call. This avoids an extra temp and copy,
4685 and prevents a partial subsumption which makes bad code.
4686 Actually we could treat component_ref's of vars like vars. */
4687
4688 tree lhs = TREE_OPERAND (exp, 0);
4689 tree rhs = TREE_OPERAND (exp, 1);
4690 tree noncopied_parts = 0;
4691 tree lhs_type = TREE_TYPE (lhs);
4692
4693 temp = 0;
4694
4695 if (TREE_CODE (lhs) != VAR_DECL
4696 && TREE_CODE (lhs) != RESULT_DECL
4697 && TREE_CODE (lhs) != PARM_DECL)
4698 preexpand_calls (exp);
4699
4700 /* Check for |= or &= of a bitfield of size one into another bitfield
4701 of size one. In this case (unless we need the result of the
4702 assignment) we can do this more efficiently with a test and,
4703 only when necessary, an assignment.
4704
4705 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4706 things change so we do, this code should be enhanced to
4707 support it. */
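/* For example, with one-bit fields and the result ignored,
   s.a |= s.b; can be compiled as if (s.b) s.a = 1;
   so the store is done only on the path where it matters.  */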
4708 if (ignore
4709 && TREE_CODE (lhs) == COMPONENT_REF
4710 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4711 || TREE_CODE (rhs) == BIT_AND_EXPR)
4712 && TREE_OPERAND (rhs, 0) == lhs
4713 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4714 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4715 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4716 {
4717 rtx label = gen_label_rtx ();
4718
4719 do_jump (TREE_OPERAND (rhs, 1),
4720 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4721 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4722 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4723 (TREE_CODE (rhs) == BIT_IOR_EXPR
4724 ? integer_one_node
4725 : integer_zero_node)),
4726 0, 0);
4727 do_pending_stack_adjust ();
4728 emit_label (label);
4729 return const0_rtx;
4730 }
4731
4732 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4733 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4734 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4735 TYPE_NONCOPIED_PARTS (lhs_type));
4736
4737 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4738 while (noncopied_parts != 0)
4739 {
4740 expand_assignment (TREE_PURPOSE (noncopied_parts),
4741 TREE_VALUE (noncopied_parts), 0, 0);
4742 noncopied_parts = TREE_CHAIN (noncopied_parts);
4743 }
4744 return temp;
4745 }
4746
4747 case PREINCREMENT_EXPR:
4748 case PREDECREMENT_EXPR:
4749 return expand_increment (exp, 0);
4750
4751 case POSTINCREMENT_EXPR:
4752 case POSTDECREMENT_EXPR:
4753 /* Faster to treat as pre-increment if result is not used. */
4754 return expand_increment (exp, ! ignore);
4755
4756 case ADDR_EXPR:
4757 /* Are we taking the address of a nested function? */
4758 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4759 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4760 {
4761 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4762 op0 = force_operand (op0, target);
4763 }
4764 else
4765 {
4766 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4767 (modifier == EXPAND_INITIALIZER
4768 ? modifier : EXPAND_CONST_ADDRESS));
4769 if (GET_CODE (op0) != MEM)
4770 abort ();
4771
4772 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4773 return XEXP (op0, 0);
4774 op0 = force_operand (XEXP (op0, 0), target);
4775 }
4776 if (flag_force_addr && GET_CODE (op0) != REG)
4777 return force_reg (Pmode, op0);
4778 return op0;
4779
4780 case ENTRY_VALUE_EXPR:
4781 abort ();
4782
4783 case ERROR_MARK:
4784 return const0_rtx;
4785
4786 default:
4787 return (*lang_expand_expr) (exp, target, tmode, modifier);
4788 }
4789
4790 /* Here to do an ordinary binary operator, generating an instruction
4791 from the optab already placed in `this_optab'. */
4792 binop:
4793 preexpand_calls (exp);
4794 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4795 subtarget = 0;
4796 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4797 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4798 binop2:
4799 temp = expand_binop (mode, this_optab, op0, op1, target,
4800 unsignedp, OPTAB_LIB_WIDEN);
4801 if (temp == 0)
4802 abort ();
4803 return temp;
4804 }
4805 \f
4806 /* Return the alignment in bits of EXP, a pointer valued expression.
4807 But don't return more than MAX_ALIGN no matter what.
4808 The alignment returned is, by default, the alignment of the thing that
4809 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4810
4811 Otherwise, look at the expression to see if we can do better, i.e., if the
4812 expression is actually pointing at an object whose alignment is tighter. */
4813
4814 static int
4815 get_pointer_alignment (exp, max_align)
4816 tree exp;
4817 unsigned max_align;
4818 {
4819 unsigned align, inner;
4820
4821 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4822 return 0;
4823
4824 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4825 align = MIN (align, max_align);
4826
4827 while (1)
4828 {
4829 switch (TREE_CODE (exp))
4830 {
4831 case NOP_EXPR:
4832 case CONVERT_EXPR:
4833 case NON_LVALUE_EXPR:
4834 exp = TREE_OPERAND (exp, 0);
4835 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4836 return align;
4837 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4838 inner = MIN (inner, max_align);
4839 align = MAX (align, inner);
4840 break;
4841
4842 case PLUS_EXPR:
4843 /* If sum of pointer + int, restrict our maximum alignment to that
4844 imposed by the integer. If not, we can't do any better than
4845 ALIGN. */
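/* E.g. if EXP is P + 2 with P known to be aligned to 8 bytes,
   the 2-byte offset limits the alignment we may claim for the
   sum to 16 bits.  */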
4846 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4847 return align;
4848
4849 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4850 & (max_align - 1))
4851 != 0)
4852 max_align >>= 1;
4853
4854 exp = TREE_OPERAND (exp, 0);
4855 break;
4856
4857 case ADDR_EXPR:
4858 /* See what we are pointing at and look at its alignment. */
4859 exp = TREE_OPERAND (exp, 0);
4860 if (TREE_CODE (exp) == FUNCTION_DECL)
4861 align = MAX (align, FUNCTION_BOUNDARY);
4862 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4863 align = MAX (align, DECL_ALIGN (exp));
4864 #ifdef CONSTANT_ALIGNMENT
4865 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4866 align = CONSTANT_ALIGNMENT (exp, align);
4867 #endif
4868 return MIN (align, max_align);
4869
4870 default:
4871 return align;
4872 }
4873 }
4874 }
4875 \f
4876 /* If ARG corresponds to a string constant, return the STRING_CST node
4877 and set *PTR_OFFSET to the offset of ARG within it; else return 0. */
4878
4879 static tree
4880 string_constant (arg, ptr_offset)
4881 tree arg;
4882 tree *ptr_offset;
4883 {
4884 STRIP_NOPS (arg);
4885
4886 if (TREE_CODE (arg) == ADDR_EXPR
4887 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4888 {
4889 *ptr_offset = integer_zero_node;
4890 return TREE_OPERAND (arg, 0);
4891 }
4892 else if (TREE_CODE (arg) == PLUS_EXPR)
4893 {
4894 tree arg0 = TREE_OPERAND (arg, 0);
4895 tree arg1 = TREE_OPERAND (arg, 1);
4896
4897 STRIP_NOPS (arg0);
4898 STRIP_NOPS (arg1);
4899
4900 if (TREE_CODE (arg0) == ADDR_EXPR
4901 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4902 {
4903 *ptr_offset = arg1;
4904 return TREE_OPERAND (arg0, 0);
4905 }
4906 else if (TREE_CODE (arg1) == ADDR_EXPR
4907 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4908 {
4909 *ptr_offset = arg0;
4910 return TREE_OPERAND (arg1, 0);
4911 }
4912 }
4913
4914 return 0;
4915 }
4916
4917 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4918 way, because the string could contain a zero byte in the middle;
4919 TREE_STRING_LENGTH is the size of the character array, not the string.
4920
4921 Unfortunately, string_constant can't access the values of const char
4922 arrays with initializers, so neither can we do so here. */
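/* E.g. for the 8-byte string constant "foo\0bar", TREE_STRING_LENGTH
   is 8, but the length strlen would report is 3.  */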
4923
4924 static tree
4925 c_strlen (src)
4926 tree src;
4927 {
4928 tree offset_node;
4929 int offset, max;
4930 char *ptr;
4931
4932 src = string_constant (src, &offset_node);
4933 if (src == 0)
4934 return 0;
4935 max = TREE_STRING_LENGTH (src);
4936 ptr = TREE_STRING_POINTER (src);
4937 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4938 {
4939 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4940 compute the offset to the following null if we don't know where to
4941 start searching for it. */
4942 int i;
4943 for (i = 0; i < max; i++)
4944 if (ptr[i] == 0)
4945 return 0;
4946 /* We don't know the starting offset, but we do know that the string
4947 has no internal zero bytes. We can assume that the offset falls
4948 within the bounds of the string; otherwise, the programmer deserves
4949 what he gets. Subtract the offset from the length of the string,
4950 and return that. */
4951 /* This would perhaps not be valid if we were dealing with named
4952 arrays in addition to literal string constants. */
4953 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4954 }
4955
4956 /* We have a known offset into the string. Start searching there for
4957 a null character. */
4958 if (offset_node == 0)
4959 offset = 0;
4960 else
4961 {
4962 /* Did we get a long long offset? If so, punt. */
4963 if (TREE_INT_CST_HIGH (offset_node) != 0)
4964 return 0;
4965 offset = TREE_INT_CST_LOW (offset_node);
4966 }
4967 /* If the offset is known to be out of bounds, warn, and call strlen at
4968 runtime. */
4969 if (offset < 0 || offset > max)
4970 {
4971 warning ("offset outside bounds of constant string");
4972 return 0;
4973 }
4974 /* Use strlen to search for the first zero byte. Since any strings
4975 constructed with build_string will have nulls appended, we win even
4976 if we get handed something like (char[4])"abcd".
4977
4978 Since OFFSET is our starting index into the string, no further
4979 calculation is needed. */
4980 return size_int (strlen (ptr + offset));
4981 }
4982 \f
4983 /* Expand an expression EXP that calls a built-in function,
4984 with result going to TARGET if that's convenient
4985 (and in mode MODE if that's convenient).
4986 SUBTARGET may be used as the target for computing one of EXP's operands.
4987 IGNORE is nonzero if the value is to be ignored. */
4988
4989 static rtx
4990 expand_builtin (exp, target, subtarget, mode, ignore)
4991 tree exp;
4992 rtx target;
4993 rtx subtarget;
4994 enum machine_mode mode;
4995 int ignore;
4996 {
4997 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4998 tree arglist = TREE_OPERAND (exp, 1);
4999 rtx op0;
5000 rtx lab1, insns;
5001 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5002
5003 switch (DECL_FUNCTION_CODE (fndecl))
5004 {
5005 case BUILT_IN_ABS:
5006 case BUILT_IN_LABS:
5007 case BUILT_IN_FABS:
5008 /* build_function_call changes these into ABS_EXPR. */
5009 abort ();
5010
5011 case BUILT_IN_FSQRT:
5012 /* If not optimizing, call the library function. */
5013 if (! optimize)
5014 break;
5015
5016 if (arglist == 0
5017 /* Arg could be wrong type if user redeclared this fcn wrong. */
5018 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5019 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5020
5021 /* Stabilize and compute the argument. */
5022 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5023 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5024 {
5025 exp = copy_node (exp);
5026 arglist = copy_node (arglist);
5027 TREE_OPERAND (exp, 1) = arglist;
5028 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5029 }
5030 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5031
5032 /* Make a suitable register to place result in. */
5033 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5034
5035 emit_queue ();
5036 start_sequence ();
5037
5038 /* Compute sqrt into TARGET.
5039 Set TARGET to wherever the result comes back. */
5040 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5041 sqrt_optab, op0, target, 0);
5042
5043 /* If we were unable to expand via the builtin, stop the
5044 sequence (without outputting the insns) and break, causing
5045 a call to the library function. */
5046 if (target == 0)
5047 {
5048 end_sequence ();
5049 break;
5050 }
5051
5052 /* Check the results by default. But if flag_fast_math is turned on,
5053 then assume sqrt will always be called with valid arguments. */
5054
5055 if (! flag_fast_math)
5056 {
5057 /* Don't define the sqrt instructions
5058 if your machine is not IEEE. */
5059 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5060 abort ();
5061
5062 lab1 = gen_label_rtx ();
5063
5064 /* Test the result; if it is NaN, set errno=EDOM because
5065 the argument was not in the domain. */
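/* Under IEEE arithmetic only a NaN compares unequal to itself,
   so the branch-if-equal below is taken for every valid result,
   skipping the errno-setting code.  */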
5066 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5067 emit_jump_insn (gen_beq (lab1));
5068
5069 #if TARGET_EDOM
5070 {
5071 #ifdef GEN_ERRNO_RTX
5072 rtx errno_rtx = GEN_ERRNO_RTX;
5073 #else
5074 rtx errno_rtx
5075 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5076 #endif
5077
5078 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5079 }
5080 #else
5081 /* We can't set errno=EDOM directly; let the library call do it.
5082 Pop the arguments right away in case the call gets deleted. */
5083 NO_DEFER_POP;
5084 expand_call (exp, target, 0);
5085 OK_DEFER_POP;
5086 #endif
5087
5088 emit_label (lab1);
5089 }
5090
5091 /* Output the entire sequence. */
5092 insns = get_insns ();
5093 end_sequence ();
5094 emit_insns (insns);
5095
5096 return target;
5097
5098 case BUILT_IN_SAVEREGS:
5099 /* Don't do __builtin_saveregs more than once in a function.
5100 Save the result of the first call and reuse it. */
5101 if (saveregs_value != 0)
5102 return saveregs_value;
5103 {
5104 /* When this function is called, it means that registers must be
5105 saved on entry to this function. So we migrate the
5106 call to the first insn of this function. */
5107 rtx temp;
5108 rtx seq;
5109 rtx valreg, saved_valreg;
5110
5111 /* Now really call the function. `expand_call' does not call
5112 expand_builtin, so there is no danger of infinite recursion here. */
5113 start_sequence ();
5114
5115 #ifdef EXPAND_BUILTIN_SAVEREGS
5116 /* Do whatever the machine needs done in this case. */
5117 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5118 #else
5119 /* The register where the function returns its value
5120 is likely to have something else in it, such as an argument.
5121 So preserve that register around the call. */
5122 if (value_mode != VOIDmode)
5123 {
5124 valreg = hard_libcall_value (value_mode);
5125 saved_valreg = gen_reg_rtx (value_mode);
5126 emit_move_insn (saved_valreg, valreg);
5127 }
5128
5129 /* Generate the call, putting the value in a pseudo. */
5130 temp = expand_call (exp, target, ignore);
5131
5132 if (value_mode != VOIDmode)
5133 emit_move_insn (valreg, saved_valreg);
5134 #endif
5135
5136 seq = get_insns ();
5137 end_sequence ();
5138
5139 saveregs_value = temp;
5140
5141 /* This won't work inside a SEQUENCE--it really has to be
5142 at the start of the function. */
5143 if (in_sequence_p ())
5144 {
5145 /* Better to do this than to crash. */
5146 error ("`va_start' used within `({...})'");
5147 return temp;
5148 }
5149
5150 /* Put the sequence after the NOTE that starts the function. */
5151 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5152 return temp;
5153 }
5154
5155 /* __builtin_args_info (N) returns word N of the arg space info
5156 for the current function. The number and meanings of the words
5157 are controlled by the definition of CUMULATIVE_ARGS. */
5158 case BUILT_IN_ARGS_INFO:
5159 {
5160 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5161 int i;
5162 int *word_ptr = (int *) &current_function_args_info;
5163 tree type, elts, result;
5164
5165 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5166 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5167 __FILE__, __LINE__);
5168
5169 if (arglist != 0)
5170 {
5171 tree arg = TREE_VALUE (arglist);
5172 if (TREE_CODE (arg) != INTEGER_CST)
5173 error ("argument of __builtin_args_info must be constant");
5174 else
5175 {
5176 int wordnum = TREE_INT_CST_LOW (arg);
5177
5178 if (wordnum < 0 || wordnum >= nwords)
5179 error ("argument of __builtin_args_info out of range");
5180 else
5181 return GEN_INT (word_ptr[wordnum]);
5182 }
5183 }
5184 else
5185 error ("missing argument in __builtin_args_info");
5186
5187 return const0_rtx;
5188
5189 #if 0
5190 for (elts = NULL_TREE, i = 0; i < nwords; i++)
5191 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5192
5193 type = build_array_type (integer_type_node,
5194 build_index_type (build_int_2 (nwords, 0)));
5195 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5196 TREE_CONSTANT (result) = 1;
5197 TREE_STATIC (result) = 1;
5198 result = build (INDIRECT_REF, build_pointer_type (type), result);
5199 TREE_CONSTANT (result) = 1;
5200 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5201 #endif
5202 }
5203
5204 /* Return the address of the first anonymous stack arg. */
5205 case BUILT_IN_NEXT_ARG:
5206 {
5207 tree fntype = TREE_TYPE (current_function_decl);
5208 if (!(TYPE_ARG_TYPES (fntype) != 0
5209 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5210 != void_type_node)))
5211 {
5212 error ("`va_start' used in function with fixed args");
5213 return const0_rtx;
5214 }
5215 }
5216
5217 return expand_binop (Pmode, add_optab,
5218 current_function_internal_arg_pointer,
5219 current_function_arg_offset_rtx,
5220 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5221
5222 case BUILT_IN_CLASSIFY_TYPE:
5223 if (arglist != 0)
5224 {
5225 tree type = TREE_TYPE (TREE_VALUE (arglist));
5226 enum tree_code code = TREE_CODE (type);
5227 if (code == VOID_TYPE)
5228 return GEN_INT (void_type_class);
5229 if (code == INTEGER_TYPE)
5230 return GEN_INT (integer_type_class);
5231 if (code == CHAR_TYPE)
5232 return GEN_INT (char_type_class);
5233 if (code == ENUMERAL_TYPE)
5234 return GEN_INT (enumeral_type_class);
5235 if (code == BOOLEAN_TYPE)
5236 return GEN_INT (boolean_type_class);
5237 if (code == POINTER_TYPE)
5238 return GEN_INT (pointer_type_class);
5239 if (code == REFERENCE_TYPE)
5240 return GEN_INT (reference_type_class);
5241 if (code == OFFSET_TYPE)
5242 return GEN_INT (offset_type_class);
5243 if (code == REAL_TYPE)
5244 return GEN_INT (real_type_class);
5245 if (code == COMPLEX_TYPE)
5246 return GEN_INT (complex_type_class);
5247 if (code == FUNCTION_TYPE)
5248 return GEN_INT (function_type_class);
5249 if (code == METHOD_TYPE)
5250 return GEN_INT (method_type_class);
5251 if (code == RECORD_TYPE)
5252 return GEN_INT (record_type_class);
5253 if (code == UNION_TYPE)
5254 return GEN_INT (union_type_class);
5255 if (code == ARRAY_TYPE)
5256 return GEN_INT (array_type_class);
5257 if (code == STRING_TYPE)
5258 return GEN_INT (string_type_class);
5259 if (code == SET_TYPE)
5260 return GEN_INT (set_type_class);
5261 if (code == FILE_TYPE)
5262 return GEN_INT (file_type_class);
5263 if (code == LANG_TYPE)
5264 return GEN_INT (lang_type_class);
5265 }
5266 return GEN_INT (no_type_class);
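/* For instance, `__builtin_classify_type (1.0)' expands to
   real_type_class, and for a hypothetical int variable N,
   `__builtin_classify_type (N)' to integer_type_class; with no
   argument, no_type_class results, as above.  */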
5267
5268 case BUILT_IN_CONSTANT_P:
5269 if (arglist == 0)
5270 return const0_rtx;
5271 else
5272 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5273 ? const1_rtx : const0_rtx);
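/* Note that only literal constants (tree class 'c') are detected
   here: `__builtin_constant_p (3)' yields 1, but for a hypothetical
   variable N, `__builtin_constant_p (N)' yields 0 even when
   optimization could later prove N constant.  */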
5274
5275 case BUILT_IN_FRAME_ADDRESS:
5276 /* The argument must be a nonnegative integer constant.
5277 It counts the number of frames to scan up the stack.
5278 The value is the address of that frame. */
5279 case BUILT_IN_RETURN_ADDRESS:
5280 /* The argument must be a nonnegative integer constant.
5281 It counts the number of frames to scan up the stack.
5282 The value is the return address saved in that frame. */
5283 if (arglist == 0)
5284 /* Warning about missing arg was already issued. */
5285 return const0_rtx;
5286 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5287 {
5288 error ("invalid arg to __builtin_return_address");
5289 return const0_rtx;
5290 }
5291 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5292 {
5293 error ("invalid arg to __builtin_return_address");
5294 return const0_rtx;
5295 }
5296 else
5297 {
5298 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5299 rtx tem = frame_pointer_rtx;
5300 int i;
5301
5302 /* Scan back COUNT frames to the specified frame. */
5303 for (i = 0; i < count; i++)
5304 {
5305 /* Assume the dynamic chain pointer is in the word that
5306 the frame address points to, unless otherwise specified. */
5307 #ifdef DYNAMIC_CHAIN_ADDRESS
5308 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5309 #endif
5310 tem = memory_address (Pmode, tem);
5311 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5312 }
5313
5314 /* For __builtin_frame_address, return what we've got. */
5315 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5316 return tem;
5317
5318 /* For __builtin_return_address,
5319 get the return address from that frame. */
5320 #ifdef RETURN_ADDR_RTX
5321 return RETURN_ADDR_RTX (count, tem);
5322 #else
5323 tem = memory_address (Pmode,
5324 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5325 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5326 #endif
5327 }
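/* By way of example: `__builtin_frame_address (1)' follows the
   dynamic chain once, yielding the caller's frame address, while
   `__builtin_return_address (0)' loads the return address saved in
   the current frame (where that is found is machine-dependent; see
   RETURN_ADDR_RTX and DYNAMIC_CHAIN_ADDRESS).  */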
5328
5329 case BUILT_IN_ALLOCA:
5330 if (arglist == 0
5331 /* Arg could be non-integer if user redeclared this fcn wrong. */
5332 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5333 return const0_rtx;
5334 current_function_calls_alloca = 1;
5335 /* Compute the argument. */
5336 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5337
5338 /* Allocate the desired space. */
5339 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5340
5341 /* Record the new stack level for nonlocal gotos. */
5342 if (nonlocal_goto_handler_slot != 0)
5343 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5344 return target;
5345
5346 case BUILT_IN_FFS:
5347 /* If not optimizing, call the library function. */
5348 if (!optimize)
5349 break;
5350
5351 if (arglist == 0
5352 /* Arg could be non-integer if user redeclared this fcn wrong. */
5353 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5354 return const0_rtx;
5355
5356 /* Compute the argument. */
5357 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5358 /* Compute ffs, into TARGET if possible.
5359 Set TARGET to wherever the result comes back. */
5360 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5361 ffs_optab, op0, target, 1);
5362 if (target == 0)
5363 abort ();
5364 return target;
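/* For instance, `__builtin_ffs (12)' yields 3, since 12 is binary
   1100 and its least significant set bit is bit 3 counting from 1;
   `__builtin_ffs (0)' yields 0, matching the library `ffs'.  */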
5365
5366 case BUILT_IN_STRLEN:
5367 /* If not optimizing, call the library function. */
5368 if (!optimize)
5369 break;
5370
5371 if (arglist == 0
5372 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5373 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5374 return const0_rtx;
5375 else
5376 {
5377 tree src = TREE_VALUE (arglist);
5378 tree len = c_strlen (src);
5379
5380 int align
5381 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5382
5383 rtx result, src_rtx, char_rtx;
5384 enum machine_mode insn_mode = value_mode, char_mode;
5385 enum insn_code icode;
5386
5387 /* If the length is known, just return it. */
5388 if (len != 0)
5389 return expand_expr (len, target, mode, 0);
5390
5391 /* If SRC is not a pointer type, don't do this operation inline. */
5392 if (align == 0)
5393 break;
5394
5395 /* Call a function if we can't compute strlen in the right mode. */
5396
5397 while (insn_mode != VOIDmode)
5398 {
5399 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5400 if (icode != CODE_FOR_nothing)
5401 break;
5402
5403 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5404 }
5405 if (insn_mode == VOIDmode)
5406 break;
5407
5408 /* Make a place to write the result of the instruction. */
5409 result = target;
5410 if (! (result != 0
5411 && GET_CODE (result) == REG
5412 && GET_MODE (result) == insn_mode
5413 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5414 result = gen_reg_rtx (insn_mode);
5415
5416 /* Make sure the operands are acceptable to the predicates. */
5417
5418 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5419 result = gen_reg_rtx (insn_mode);
5420
5421 src_rtx = memory_address (BLKmode,
5422 expand_expr (src, NULL_RTX, Pmode,
5423 EXPAND_NORMAL));
5424 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5425 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5426
5427 char_rtx = const0_rtx;
5428 char_mode = insn_operand_mode[(int)icode][2];
5429 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5430 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5431
5432 emit_insn (GEN_FCN (icode) (result,
5433 gen_rtx (MEM, BLKmode, src_rtx),
5434 char_rtx, GEN_INT (align)));
5435
5436 /* Return the value in the proper mode for this function. */
5437 if (GET_MODE (result) == value_mode)
5438 return result;
5439 else if (target != 0)
5440 {
5441 convert_move (target, result, 0);
5442 return target;
5443 }
5444 else
5445 return convert_to_mode (value_mode, result, 0);
5446 }
5447
5448 case BUILT_IN_STRCPY:
5449 /* If not optimizing, call the library function. */
5450 if (!optimize)
5451 break;
5452
5453 if (arglist == 0
5454 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5455 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5456 || TREE_CHAIN (arglist) == 0
5457 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5458 return const0_rtx;
5459 else
5460 {
5461 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5462
5463 if (len == 0)
5464 break;
5465
5466 len = size_binop (PLUS_EXPR, len, integer_one_node);
5467
5468 chainon (arglist, build_tree_list (NULL_TREE, len));
5469 }
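/* E.g. a hypothetical `strcpy (buf, "hi")' reaches the memcpy code
   below with a length of 3, so the copy, terminating null included,
   becomes a block move of known size.  */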
5470
5471 /* Falls through. */
5472 case BUILT_IN_MEMCPY:
5473 /* If not optimizing, call the library function. */
5474 if (!optimize)
5475 break;
5476
5477 if (arglist == 0
5478 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5479 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5480 || TREE_CHAIN (arglist) == 0
5481 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5482 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5483 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5484 return const0_rtx;
5485 else
5486 {
5487 tree dest = TREE_VALUE (arglist);
5488 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5489 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5490
5491 int src_align
5492 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5493 int dest_align
5494 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5495 rtx dest_rtx;
5496
5497 /* If either SRC or DEST is not a pointer type, don't do
5498 this operation in-line. */
5499 if (src_align == 0 || dest_align == 0)
5500 {
5501 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5502 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5503 break;
5504 }
5505
5506 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5507
5508 /* Copy word part most expediently. */
5509 emit_block_move (gen_rtx (MEM, BLKmode,
5510 memory_address (BLKmode, dest_rtx)),
5511 gen_rtx (MEM, BLKmode,
5512 memory_address (BLKmode,
5513 expand_expr (src, NULL_RTX,
5514 Pmode,
5515 EXPAND_NORMAL))),
5516 expand_expr (len, NULL_RTX, VOIDmode, 0),
5517 MIN (src_align, dest_align));
5518 return dest_rtx;
5519 }
5520
5521 /* These comparison functions need an instruction that returns an actual
5522 index. An ordinary compare that just sets the condition codes
5523 is not enough. */
5524 #ifdef HAVE_cmpstrsi
5525 case BUILT_IN_STRCMP:
5526 /* If not optimizing, call the library function. */
5527 if (!optimize)
5528 break;
5529
5530 if (arglist == 0
5531 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5532 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5533 || TREE_CHAIN (arglist) == 0
5534 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5535 return const0_rtx;
5536 else if (!HAVE_cmpstrsi)
5537 break;
5538 {
5539 tree arg1 = TREE_VALUE (arglist);
5540 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5541 tree offset;
5542 tree len, len2;
5543
5544 len = c_strlen (arg1);
5545 if (len)
5546 len = size_binop (PLUS_EXPR, integer_one_node, len);
5547 len2 = c_strlen (arg2);
5548 if (len2)
5549 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5550
5551 /* If we don't have a constant length for the first, use the length
5552 of the second, if we know it. We don't require a constant for
5553 this case; some cost analysis could be done if both are available
5554 but neither is constant. For now, assume they're equally cheap.
5555
5556 If both strings have constant lengths, use the smaller. This
5557 could arise if optimization results in strcmp being called with
5558 two fixed strings, or if the code was machine-generated. We should
5559 add some code to the `memcmp' handler below to deal with such
5560 situations, someday. */
5561 if (!len || TREE_CODE (len) != INTEGER_CST)
5562 {
5563 if (len2)
5564 len = len2;
5565 else if (len == 0)
5566 break;
5567 }
5568 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5569 {
5570 if (tree_int_cst_lt (len2, len))
5571 len = len2;
5572 }
5573
5574 chainon (arglist, build_tree_list (NULL_TREE, len));
5575 }
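/* E.g. for a hypothetical `strcmp (s, "ab")' with S unknown, LEN
   becomes 3 (the constant string's length plus its null), which
   safely bounds how many bytes the comparison below may examine.  */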
5576
5577 /* Falls through. */
5578 case BUILT_IN_MEMCMP:
5579 /* If not optimizing, call the library function. */
5580 if (!optimize)
5581 break;
5582
5583 if (arglist == 0
5584 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5585 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5586 || TREE_CHAIN (arglist) == 0
5587 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5588 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5589 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5590 return const0_rtx;
5591 else if (!HAVE_cmpstrsi)
5592 break;
5593 {
5594 tree arg1 = TREE_VALUE (arglist);
5595 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5596 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5597 rtx result;
5598
5599 int arg1_align
5600 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5601 int arg2_align
5602 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5603 enum machine_mode insn_mode
5604 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5605
5606 /* If either arg is not really a pointer (alignment unknown), call the function. */
5607 if (arg1_align == 0 || arg2_align == 0)
5608 {
5609 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5610 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5611 break;
5612 }
5613
5614 /* Make a place to write the result of the instruction. */
5615 result = target;
5616 if (! (result != 0
5617 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5618 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5619 result = gen_reg_rtx (insn_mode);
5620
5621 emit_insn (gen_cmpstrsi (result,
5622 gen_rtx (MEM, BLKmode,
5623 expand_expr (arg1, NULL_RTX, Pmode,
5624 EXPAND_NORMAL)),
5625 gen_rtx (MEM, BLKmode,
5626 expand_expr (arg2, NULL_RTX, Pmode,
5627 EXPAND_NORMAL)),
5628 expand_expr (len, NULL_RTX, VOIDmode, 0),
5629 GEN_INT (MIN (arg1_align, arg2_align))));
5630
5631 /* Return the value in the proper mode for this function. */
5632 mode = TYPE_MODE (TREE_TYPE (exp));
5633 if (GET_MODE (result) == mode)
5634 return result;
5635 else if (target != 0)
5636 {
5637 convert_move (target, result, 0);
5638 return target;
5639 }
5640 else
5641 return convert_to_mode (mode, result, 0);
5642 }
5643 #else
5644 case BUILT_IN_STRCMP:
5645 case BUILT_IN_MEMCMP:
5646 break;
5647 #endif
5648
5649 default: /* just do library call, if unknown builtin */
5650 error ("built-in function %s not currently supported",
5651 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5652 }
5653
5654 /* The switch statement above can drop through to cause the function
5655 to be called normally. */
5656
5657 return expand_call (exp, target, ignore);
5658 }
5659 \f
5660 /* Expand code for a post- or pre- increment or decrement
5661 and return the RTX for the result.
5662 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5663
5664 static rtx
5665 expand_increment (exp, post)
5666 register tree exp;
5667 int post;
5668 {
5669 register rtx op0, op1;
5670 register rtx temp, value;
5671 register tree incremented = TREE_OPERAND (exp, 0);
5672 optab this_optab = add_optab;
5673 int icode;
5674 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5675 int op0_is_copy = 0;
5676
5677 /* Stabilize any component ref that might need to be
5678 evaluated more than once below. */
5679 if (TREE_CODE (incremented) == BIT_FIELD_REF
5680 || (TREE_CODE (incremented) == COMPONENT_REF
5681 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5682 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5683 incremented = stabilize_reference (incremented);
5684
5685 /* Compute the operands as RTX.
5686 Note whether OP0 is the actual lvalue or a copy of it:
5687 I believe it is a copy iff it is a register or subreg
5688 and insns were generated in computing it. */
5689 temp = get_last_insn ();
5690 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5691 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5692 && temp != get_last_insn ());
5693 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5694
5695 /* Decide whether incrementing or decrementing. */
5696 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5697 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5698 this_optab = sub_optab;
5699
5700 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5701 then we cannot just increment OP0. We must
5702 therefore contrive to increment the original value.
5703 Then we can return OP0 since it is a copy of the old value. */
5704 if (op0_is_copy)
5705 {
5706 /* This is the easiest way to increment the value wherever it is.
5707 Problems with multiple evaluation of INCREMENTED
5708 are prevented because either (1) it is a component_ref,
5709 in which case it was stabilized above, or (2) it is an array_ref
5710 with constant index in an array in a register, which is
5711 safe to reevaluate. */
5712 tree newexp = build ((this_optab == add_optab
5713 ? PLUS_EXPR : MINUS_EXPR),
5714 TREE_TYPE (exp),
5715 incremented,
5716 TREE_OPERAND (exp, 1));
5717 temp = expand_assignment (incremented, newexp, ! post, 0);
5718 return post ? op0 : temp;
5719 }
5720
5721 /* Convert decrement by a constant into a negative increment. */
5722 if (this_optab == sub_optab
5723 && GET_CODE (op1) == CONST_INT)
5724 {
5725 op1 = GEN_INT (- INTVAL (op1));
5726 this_optab = add_optab;
5727 }
5728
5729 if (post)
5730 {
5731 /* We have a true reference to the value in OP0.
5732 If there is an insn to add or subtract in this mode, queue it. */
5733
5734 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
5735 op0 = stabilize (op0);
5736 #endif
5737
5738 icode = (int) this_optab->handlers[(int) mode].insn_code;
5739 if (icode != (int) CODE_FOR_nothing
5740 /* Make sure that OP0 is valid for operands 0 and 1
5741 of the insn we want to queue. */
5742 && (*insn_operand_predicate[icode][0]) (op0, mode)
5743 && (*insn_operand_predicate[icode][1]) (op0, mode))
5744 {
5745 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5746 op1 = force_reg (mode, op1);
5747
5748 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5749 }
5750 }
5751
5752 /* Preincrement, or we can't increment with one simple insn. */
5753 if (post)
5754 /* Save a copy of the value before inc or dec, to return it later. */
5755 temp = value = copy_to_reg (op0);
5756 else
5757 /* Arrange to return the incremented value. */
5758 /* Copy the rtx because expand_binop will protect from the queue,
5759 and the results of that would be invalid for us to return
5760 if our caller does emit_queue before using our result. */
5761 temp = copy_rtx (value = op0);
5762
5763 /* Increment however we can. */
5764 op1 = expand_binop (mode, this_optab, value, op1, op0,
5765 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5766 /* Make sure the value is stored into OP0. */
5767 if (op1 != op0)
5768 emit_move_insn (op0, op1);
5769
5770 return temp;
5771 }
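/* A usage sketch: for `j = i++;' this is called with POST == 1 and
   the rtx returned holds the old value of I, whereas for `j = --i;'
   POST == 0 and the rtx returned reflects the decremented value.  */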
5772 \f
5773 /* Expand all function calls contained within EXP, innermost ones first.
5774 But don't look within expressions that have sequence points.
5775 For each CALL_EXPR, record the rtx for its value
5776 in the CALL_EXPR_RTL field. */
5777
5778 static void
5779 preexpand_calls (exp)
5780 tree exp;
5781 {
5782 register int nops, i;
5783 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5784
5785 if (! do_preexpand_calls)
5786 return;
5787
5788 /* Only expressions and references can contain calls. */
5789
5790 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5791 return;
5792
5793 switch (TREE_CODE (exp))
5794 {
5795 case CALL_EXPR:
5796 /* Do nothing if already expanded. */
5797 if (CALL_EXPR_RTL (exp) != 0)
5798 return;
5799
5800 /* Do nothing to built-in functions. */
5801 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5802 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5803 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5804 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5805 return;
5806
5807 case COMPOUND_EXPR:
5808 case COND_EXPR:
5809 case TRUTH_ANDIF_EXPR:
5810 case TRUTH_ORIF_EXPR:
5811 /* If we find one of these, then we can be sure
5812 the adjust will be done for it (since it makes jumps).
5813 Do it now, so that if this is inside an argument
5814 of a function, we don't get the stack adjustment
5815 after some other args have already been pushed. */
5816 do_pending_stack_adjust ();
5817 return;
5818
5819 case BLOCK:
5820 case RTL_EXPR:
5821 case WITH_CLEANUP_EXPR:
5822 return;
5823
5824 case SAVE_EXPR:
5825 if (SAVE_EXPR_RTL (exp) != 0)
5826 return;
5827 }
5828
5829 nops = tree_code_length[(int) TREE_CODE (exp)];
5830 for (i = 0; i < nops; i++)
5831 if (TREE_OPERAND (exp, i) != 0)
5832 {
5833 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5834 if (type == 'e' || type == '<' || type == '1' || type == '2'
5835 || type == 'r')
5836 preexpand_calls (TREE_OPERAND (exp, i));
5837 }
5838 }
5839 \f
5840 /* At the start of a function, record that we have no previously-pushed
5841 arguments waiting to be popped. */
5842
5843 void
5844 init_pending_stack_adjust ()
5845 {
5846 pending_stack_adjust = 0;
5847 }
5848
5849 /* When exiting from function, if safe, clear out any pending stack adjust
5850 so the adjustment won't get done. */
5851
5852 void
5853 clear_pending_stack_adjust ()
5854 {
5855 #ifdef EXIT_IGNORE_STACK
5856 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5857 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5858 && ! flag_inline_functions)
5859 pending_stack_adjust = 0;
5860 #endif
5861 }
5862
5863 /* Pop any previously-pushed arguments that have not been popped yet. */
5864
5865 void
5866 do_pending_stack_adjust ()
5867 {
5868 if (inhibit_defer_pop == 0)
5869 {
5870 if (pending_stack_adjust != 0)
5871 adjust_stack (GEN_INT (pending_stack_adjust));
5872 pending_stack_adjust = 0;
5873 }
5874 }
5875
5876 /* Expand all cleanups up to OLD_CLEANUPS.
5877 Needed here, and also for language-dependent calls. */
5878
5879 void
5880 expand_cleanups_to (old_cleanups)
5881 tree old_cleanups;
5882 {
5883 while (cleanups_this_call != old_cleanups)
5884 {
5885 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5886 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5887 }
5888 }
5889 \f
5890 /* Expand conditional expressions. */
5891
5892 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5893 LABEL is an rtx of code CODE_LABEL, in this function and all the
5894 functions here. */
5895
5896 void
5897 jumpifnot (exp, label)
5898 tree exp;
5899 rtx label;
5900 {
5901 do_jump (exp, label, NULL_RTX);
5902 }
5903
5904 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5905
5906 void
5907 jumpif (exp, label)
5908 tree exp;
5909 rtx label;
5910 {
5911 do_jump (exp, NULL_RTX, label);
5912 }
5913
5914 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5915 the result is zero, or IF_TRUE_LABEL if the result is one.
5916 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5917 meaning fall through in that case.
5918
5919 do_jump always does any pending stack adjust except when it does not
5920 actually perform a jump. An example where there is no jump
5921 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5922
5923 This function is responsible for optimizing cases such as
5924 &&, || and comparison operators in EXP. */
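/* A small example of the optimization done here: for
   `if (a && b) f ();' the TRUTH_ANDIF_EXPR case below jumps to the
   false label as soon as A is zero, so B is never evaluated at run
   time in that case and no boolean value is materialized.  */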
5925
5926 void
5927 do_jump (exp, if_false_label, if_true_label)
5928 tree exp;
5929 rtx if_false_label, if_true_label;
5930 {
5931 register enum tree_code code = TREE_CODE (exp);
5932 /* Some cases need to create a label to jump to
5933 in order to properly fall through.
5934 These cases set DROP_THROUGH_LABEL nonzero. */
5935 rtx drop_through_label = 0;
5936 rtx temp;
5937 rtx comparison = 0;
5938 int i;
5939 tree type;
5940
5941 emit_queue ();
5942
5943 switch (code)
5944 {
5945 case ERROR_MARK:
5946 break;
5947
5948 case INTEGER_CST:
5949 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5950 if (temp)
5951 emit_jump (temp);
5952 break;
5953
5954 #if 0
5955 /* This is not true with #pragma weak */
5956 case ADDR_EXPR:
5957 /* The address of something can never be zero. */
5958 if (if_true_label)
5959 emit_jump (if_true_label);
5960 break;
5961 #endif
5962
5963 case NOP_EXPR:
5964 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5965 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5966 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5967 goto normal;
5968 case CONVERT_EXPR:
5969 /* If we are narrowing the operand, we have to do the compare in the
5970 narrower mode. */
5971 if ((TYPE_PRECISION (TREE_TYPE (exp))
5972 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5973 goto normal;
5974 case NON_LVALUE_EXPR:
5975 case REFERENCE_EXPR:
5976 case ABS_EXPR:
5977 case NEGATE_EXPR:
5978 case LROTATE_EXPR:
5979 case RROTATE_EXPR:
5980 /* These cannot change zero->non-zero or vice versa. */
5981 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5982 break;
5983
5984 #if 0
5985 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
5986 a test and can be longer if the test is eliminated. */
5987 case PLUS_EXPR:
5988 /* Reduce to minus. */
5989 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5990 TREE_OPERAND (exp, 0),
5991 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5992 TREE_OPERAND (exp, 1))));
5993 /* Process as MINUS. */
5994 #endif
5995
5996 case MINUS_EXPR:
5997 /* Non-zero iff operands of minus differ. */
5998 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5999 TREE_OPERAND (exp, 0),
6000 TREE_OPERAND (exp, 1)),
6001 NE, NE);
6002 break;
6003
6004 case BIT_AND_EXPR:
6005 /* If we are AND'ing with a small constant, do this comparison in the
6006 smallest type that fits. If the machine doesn't have comparisons
6007 that small, it will be converted back to the wider comparison.
6008 This helps if we are testing the sign bit of a narrower object.
6009 combine can't do this for us because it can't know whether a
6010 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6011
6012 if (! SLOW_BYTE_ACCESS
6013 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6014 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6015 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6016 && (type = type_for_size (i + 1, 1)) != 0
6017 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6018 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6019 != CODE_FOR_nothing))
6020 {
6021 do_jump (convert (type, exp), if_false_label, if_true_label);
6022 break;
6023 }
6024 goto normal;
6025
6026 case TRUTH_NOT_EXPR:
6027 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6028 break;
6029
6030 case TRUTH_ANDIF_EXPR:
6031 if (if_false_label == 0)
6032 if_false_label = drop_through_label = gen_label_rtx ();
6033 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6034 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6035 break;
6036
6037 case TRUTH_ORIF_EXPR:
6038 if (if_true_label == 0)
6039 if_true_label = drop_through_label = gen_label_rtx ();
6040 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6041 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6042 break;
6043
6044 case COMPOUND_EXPR:
6045 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6046 free_temp_slots ();
6047 emit_queue ();
6048 do_pending_stack_adjust ();
6049 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6050 break;
6051
6052 case COMPONENT_REF:
6053 case BIT_FIELD_REF:
6054 case ARRAY_REF:
6055 {
6056 int bitsize, bitpos, unsignedp;
6057 enum machine_mode mode;
6058 tree type;
6059 tree offset;
6060 int volatilep = 0;
6061
6062 /* Get description of this reference. We don't actually care
6063 about the underlying object here. */
6064 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6065 &mode, &unsignedp, &volatilep);
6066
6067 type = type_for_size (bitsize, unsignedp);
6068 if (! SLOW_BYTE_ACCESS
6069 && type != 0 && bitsize >= 0
6070 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6071 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6072 != CODE_FOR_nothing))
6073 {
6074 do_jump (convert (type, exp), if_false_label, if_true_label);
6075 break;
6076 }
6077 goto normal;
6078 }
6079
6080 case COND_EXPR:
6081 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6082 if (integer_onep (TREE_OPERAND (exp, 1))
6083 && integer_zerop (TREE_OPERAND (exp, 2)))
6084 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6085
6086 else if (integer_zerop (TREE_OPERAND (exp, 1))
6087 && integer_onep (TREE_OPERAND (exp, 2)))
6088 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6089
6090 else
6091 {
6092 register rtx label1 = gen_label_rtx ();
6093 drop_through_label = gen_label_rtx ();
6094 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6095 /* Now the THEN-expression. */
6096 do_jump (TREE_OPERAND (exp, 1),
6097 if_false_label ? if_false_label : drop_through_label,
6098 if_true_label ? if_true_label : drop_through_label);
6099 /* In case the do_jump just above never jumps. */
6100 do_pending_stack_adjust ();
6101 emit_label (label1);
6102 /* Now the ELSE-expression. */
6103 do_jump (TREE_OPERAND (exp, 2),
6104 if_false_label ? if_false_label : drop_through_label,
6105 if_true_label ? if_true_label : drop_through_label);
6106 }
6107 break;
6108
6109 case EQ_EXPR:
6110 if (integer_zerop (TREE_OPERAND (exp, 1)))
6111 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6112 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6113 == MODE_INT)
6114 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6116 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6117 else
6118 comparison = compare (exp, EQ, EQ);
6119 break;
6120
6121 case NE_EXPR:
6122 if (integer_zerop (TREE_OPERAND (exp, 1)))
6123 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6124 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6125 == MODE_INT)
6126 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6128 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6129 else
6130 comparison = compare (exp, NE, NE);
6131 break;
6132
6133 case LT_EXPR:
6134 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6135 == MODE_INT)
6136 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6137 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6138 else
6139 comparison = compare (exp, LT, LTU);
6140 break;
6141
6142 case LE_EXPR:
6143 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6144 == MODE_INT)
6145 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6146 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6147 else
6148 comparison = compare (exp, LE, LEU);
6149 break;
6150
6151 case GT_EXPR:
6152 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6153 == MODE_INT)
6154 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6155 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6156 else
6157 comparison = compare (exp, GT, GTU);
6158 break;
6159
6160 case GE_EXPR:
6161 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6162 == MODE_INT)
6163 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6164 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6165 else
6166 comparison = compare (exp, GE, GEU);
6167 break;
6168
6169 default:
6170 normal:
6171 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6172 #if 0
6173 /* This is not needed any more and causes poor code since it causes
6174 comparisons and tests from non-SI objects to have different code
6175 sequences. */
6176 /* Copy to register to avoid generating bad insns by cse
6177 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6178 if (!cse_not_expected && GET_CODE (temp) == MEM)
6179 temp = copy_to_reg (temp);
6180 #endif
6181 do_pending_stack_adjust ();
6182 if (GET_CODE (temp) == CONST_INT)
6183 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6184 else if (GET_CODE (temp) == LABEL_REF)
6185 comparison = const_true_rtx;
6186 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6187 && !can_compare_p (GET_MODE (temp)))
6188 /* Note swapping the labels gives us not-equal. */
6189 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6190 else if (GET_MODE (temp) != VOIDmode)
6191 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6192 NE, 1, GET_MODE (temp), NULL_RTX, 0);
6193 else
6194 abort ();
6195 }
6196
6197 /* Do any postincrements in the expression that was tested. */
6198 emit_queue ();
6199
6200 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6201 straight into a conditional jump instruction as the jump condition.
6202 Otherwise, all the work has been done already. */
6203
6204 if (comparison == const_true_rtx)
6205 {
6206 if (if_true_label)
6207 emit_jump (if_true_label);
6208 }
6209 else if (comparison == const0_rtx)
6210 {
6211 if (if_false_label)
6212 emit_jump (if_false_label);
6213 }
6214 else if (comparison)
6215 do_jump_for_compare (comparison, if_false_label, if_true_label);
6216
6217 free_temp_slots ();
6218
6219 if (drop_through_label)
6220 {
6221 /* If do_jump produces code that might be jumped around,
6222 do any stack adjusts from that code, before the place
6223 where control merges in. */
6224 do_pending_stack_adjust ();
6225 emit_label (drop_through_label);
6226 }
6227 }
6228 \f
6229 /* Given a comparison expression EXP for values too wide to be compared
6230 with one insn, test the comparison and jump to the appropriate label.
6231 The code of EXP is ignored; we always test GT if SWAP is 0,
6232 and LT if SWAP is 1. */
6233
6234 static void
6235 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6236 tree exp;
6237 int swap;
6238 rtx if_false_label, if_true_label;
6239 {
6240 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6241 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6242 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6243 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6244 rtx drop_through_label = 0;
6245 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6246 int i;
6247
6248 if (! if_true_label || ! if_false_label)
6249 drop_through_label = gen_label_rtx ();
6250 if (! if_true_label)
6251 if_true_label = drop_through_label;
6252 if (! if_false_label)
6253 if_false_label = drop_through_label;
6254
6255 /* Compare a word at a time, high order first. */
6256 for (i = 0; i < nwords; i++)
6257 {
6258 rtx comp;
6259 rtx op0_word, op1_word;
6260
6261 if (WORDS_BIG_ENDIAN)
6262 {
6263 op0_word = operand_subword_force (op0, i, mode);
6264 op1_word = operand_subword_force (op1, i, mode);
6265 }
6266 else
6267 {
6268 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6269 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6270 }
6271
6272 /* All but high-order word must be compared as unsigned. */
6273 comp = compare_from_rtx (op0_word, op1_word,
6274 (unsignedp || i > 0) ? GTU : GT,
6275 unsignedp, word_mode, NULL_RTX, 0);
6276 if (comp == const_true_rtx)
6277 emit_jump (if_true_label);
6278 else if (comp != const0_rtx)
6279 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6280
6281 /* Consider lower words only if these are equal. */
6282 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6283 NULL_RTX, 0);
6284 if (comp == const_true_rtx)
6285 emit_jump (if_false_label);
6286 else if (comp != const0_rtx)
6287 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6288 }
6289
6290 if (if_false_label)
6291 emit_jump (if_false_label);
6292 if (drop_through_label)
6293 emit_label (drop_through_label);
6294 }
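/* Sketch of the effect: comparing two DImode values on a
   hypothetical 32-bit target compares the high-order words first
   (signed, unless the operands are unsigned) and goes on to an
   unsigned comparison of the low-order words only when the high
   words are equal.  */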
6295
6296 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6297 with one insn, test the comparison and jump to the appropriate label. */
6298
6299 static void
6300 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6301 tree exp;
6302 rtx if_false_label, if_true_label;
6303 {
6304 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6305 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6306 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6307 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6308 int i;
6309 rtx drop_through_label = 0;
6310
6311 if (! if_false_label)
6312 drop_through_label = if_false_label = gen_label_rtx ();
6313
6314 for (i = 0; i < nwords; i++)
6315 {
6316 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6317 operand_subword_force (op1, i, mode),
6318 EQ, 0, word_mode, NULL_RTX, 0);
6319 if (comp == const_true_rtx)
6320 emit_jump (if_false_label);
6321 else if (comp != const0_rtx)
6322 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6323 }
6324
6325 if (if_true_label)
6326 emit_jump (if_true_label);
6327 if (drop_through_label)
6328 emit_label (drop_through_label);
6329 }
6330 \f
6331 /* Jump according to whether OP0 is 0.
6332 We assume that OP0 has an integer mode that is too wide
6333 for the available compare insns. */
6334
6335 static void
6336 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6337 rtx op0;
6338 rtx if_false_label, if_true_label;
6339 {
6340 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6341 int i;
6342 rtx drop_through_label = 0;
6343
6344 if (! if_false_label)
6345 drop_through_label = if_false_label = gen_label_rtx ();
6346
6347 for (i = 0; i < nwords; i++)
6348 {
6349 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6350 GET_MODE (op0)),
6351 const0_rtx, EQ, 0, word_mode, NULL_RTX, 0);
6352 if (comp == const_true_rtx)
6353 emit_jump (if_false_label);
6354 else if (comp != const0_rtx)
6355 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6356 }
6357
6358 if (if_true_label)
6359 emit_jump (if_true_label);
6360 if (drop_through_label)
6361 emit_label (drop_through_label);
6362 }
6363
6364 /* Given a comparison expression in rtl form, output conditional branches to
6365 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6366
6367 static void
6368 do_jump_for_compare (comparison, if_false_label, if_true_label)
6369 rtx comparison, if_false_label, if_true_label;
6370 {
6371 if (if_true_label)
6372 {
6373 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6374 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6375 else
6376 abort ();
6377
6378 if (if_false_label)
6379 emit_jump (if_false_label);
6380 }
6381 else if (if_false_label)
6382 {
6383 rtx insn;
6384 rtx prev = PREV_INSN (get_last_insn ());
6385 rtx branch = 0;
6386
6387 /* Output the branch with the opposite condition. Then try to invert
6388 what is generated. If more than one insn is a branch, or if the
6389 branch is not the last insn written, abort. If we can't invert
6390 the branch, make a true label, redirect this jump to that,
6391 emit a jump to the false label and define the true label. */
6392
6393 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6394 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6395 else
6396 abort ();
6397
6398 /* Here we get the insn before what was just emitted.
6399 On some machines, emitting the branch can discard
6400 the previous compare insn and emit a replacement. */
6401 if (prev == 0)
6402 /* If there's only one preceding insn... */
6403 insn = get_insns ();
6404 else
6405 insn = NEXT_INSN (prev);
6406
6407 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6408 if (GET_CODE (insn) == JUMP_INSN)
6409 {
6410 if (branch)
6411 abort ();
6412 branch = insn;
6413 }
6414
6415 if (branch != get_last_insn ())
6416 abort ();
6417
6418 if (! invert_jump (branch, if_false_label))
6419 {
6420 if_true_label = gen_label_rtx ();
6421 redirect_jump (branch, if_true_label);
6422 emit_jump (if_false_label);
6423 emit_label (if_true_label);
6424 }
6425 }
6426 }
6427 \f
6428 /* Generate code for a comparison expression EXP
6429 (including code to compute the values to be compared)
6430 and set (CC0) according to the result.
6431 SIGNED_CODE should be the rtx operation for this comparison for
6432 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6433
6434 We force a stack adjustment unless there are currently
6435 things pushed on the stack that aren't yet used. */
6436
6437 static rtx
6438 compare (exp, signed_code, unsigned_code)
6439 register tree exp;
6440 enum rtx_code signed_code, unsigned_code;
6441 {
6442 register rtx op0
6443 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6444 register rtx op1
6445 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6446 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6447 register enum machine_mode mode = TYPE_MODE (type);
6448 int unsignedp = TREE_UNSIGNED (type);
6449 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6450
6451 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6452 ((mode == BLKmode)
6453 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6454 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6455 }
6456
6457 /* Like compare but expects the values to compare as two rtx's.
6458 The decision as to signed or unsigned comparison must be made by the caller.
6459
6460 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6461 compared.
6462
6463 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6464 size of MODE should be used. */
6465
6466 rtx
6467 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6468 register rtx op0, op1;
6469 enum rtx_code code;
6470 int unsignedp;
6471 enum machine_mode mode;
6472 rtx size;
6473 int align;
6474 {
6475 /* If one operand is constant, make it the second one. */
6476
6477 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6478 {
6479 rtx tem = op0;
6480 op0 = op1;
6481 op1 = tem;
6482 code = swap_condition (code);
6483 }
6484
6485 if (flag_force_mem)
6486 {
6487 op0 = force_not_mem (op0);
6488 op1 = force_not_mem (op1);
6489 }
6490
6491 do_pending_stack_adjust ();
6492
6493 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6494 return simplify_relational_operation (code, mode, op0, op1);
6495
6496 /* If this is a signed equality comparison, we can do it as an
6497 unsigned comparison since zero-extension is cheaper than sign
6498 extension and comparisons with zero are done as unsigned. This is
6499 the case even on machines that can do fast sign extension, since
6500 zero-extension is easier to combine with other operations than
6501 sign-extension is. If we are comparing against a constant, we must
6502 convert it to what it would look like unsigned. */
6503 if ((code == EQ || code == NE) && ! unsignedp
6504 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6505 {
6506 if (GET_CODE (op1) == CONST_INT
6507 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6508 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6509 unsignedp = 1;
6510 }
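/* For example, with QImode operands an EQ test against the
   constant -1 becomes an unsigned test against 0xff, since only
   the low eight bits can matter for equality.  */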
6511
6512 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6513
6514 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6515 }
6516 \f
6517 /* Generate code to calculate EXP using a store-flag instruction
6518 and return an rtx for the result. EXP is either a comparison
6519 or a TRUTH_NOT_EXPR whose operand is a comparison.
6520
6521 If TARGET is nonzero, store the result there if convenient.
6522
6523 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6524 cheap.
6525
6526 Return zero if there is no suitable set-flag instruction
6527 available on this machine.
6528
6529 Once expand_expr has been called on the arguments of the comparison,
6530 we are committed to doing the store flag, since it is not safe to
6531 re-evaluate the expression. We emit the store-flag insn by calling
6532 emit_store_flag, but only expand the arguments if we have a reason
6533 to believe that emit_store_flag will be successful. If we think that
6534 it will, but it isn't, we have to simulate the store-flag with a
6535 set/jump/set sequence. */
6536
6537 static rtx
6538 do_store_flag (exp, target, mode, only_cheap)
6539 tree exp;
6540 rtx target;
6541 enum machine_mode mode;
6542 int only_cheap;
6543 {
6544 enum rtx_code code;
6545 tree arg0, arg1, type;
6546 tree tem;
6547 enum machine_mode operand_mode;
6548 int invert = 0;
6549 int unsignedp;
6550 rtx op0, op1;
6551 enum insn_code icode;
6552 rtx subtarget = target;
6553 rtx result, label, pattern, jump_pat;
6554
6555 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6556 result at the end. We can't simply invert the test since it would
6557 have already been inverted if it were valid. This case occurs for
6558 some floating-point comparisons. */
6559
6560 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6561 invert = 1, exp = TREE_OPERAND (exp, 0);
6562
6563 arg0 = TREE_OPERAND (exp, 0);
6564 arg1 = TREE_OPERAND (exp, 1);
6565 type = TREE_TYPE (arg0);
6566 operand_mode = TYPE_MODE (type);
6567 unsignedp = TREE_UNSIGNED (type);
6568
6569 /* We won't bother with BLKmode store-flag operations because it would mean
6570 passing a lot of information to emit_store_flag. */
6571 if (operand_mode == BLKmode)
6572 return 0;
6573
6574 STRIP_NOPS (arg0);
6575 STRIP_NOPS (arg1);
6576
6577 /* Get the rtx comparison code to use. We know that EXP is a comparison
6578 operation of some type. Some comparisons against 1 and -1 can be
6579 converted to comparisons with zero. Do so here so that the tests
6580 below will be aware that we have a comparison with zero. These
6581 tests will not catch constants in the first operand, but constants
6582 are rarely passed as the first operand. */
6583
6584 switch (TREE_CODE (exp))
6585 {
6586 case EQ_EXPR:
6587 code = EQ;
6588 break;
6589 case NE_EXPR:
6590 code = NE;
6591 break;
6592 case LT_EXPR:
6593 if (integer_onep (arg1))
6594 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6595 else
6596 code = unsignedp ? LTU : LT;
6597 break;
6598 case LE_EXPR:
6599 if (integer_all_onesp (arg1))
6600 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6601 else
6602 code = unsignedp ? LEU : LE;
6603 break;
6604 case GT_EXPR:
6605 if (integer_all_onesp (arg1))
6606 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6607 else
6608 code = unsignedp ? GTU : GT;
6609 break;
6610 case GE_EXPR:
6611 if (integer_onep (arg1))
6612 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6613 else
6614 code = unsignedp ? GEU : GE;
6615 break;
6616 default:
6617 abort ();
6618 }
6619
6620 /* Put a constant second. */
6621 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6622 {
6623 tem = arg0; arg0 = arg1; arg1 = tem;
6624 code = swap_condition (code);
6625 }
6626
6627 /* If this is an equality or inequality test of a single bit, we can
6628 do this by shifting the bit being tested to the low-order bit and
6629 masking the result with the constant 1. If the condition was EQ,
6630 we xor it with 1. This does not require an scc insn and is faster
6631 than an scc insn even if we have it. */
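/* For instance, with X a hypothetical integer variable,
   `(x & 4) != 0' becomes `(x >> 2) & 1', and the EQ form
   `(x & 4) == 0' additionally xors that result with 1.  */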
6632
6633 if ((code == NE || code == EQ)
6634 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6635 && integer_pow2p (TREE_OPERAND (arg0, 1))
6636 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6637 {
6638 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6639 NULL_RTX, VOIDmode, 0)));
6640
6641 if (subtarget == 0 || GET_CODE (subtarget) != REG
6642 || GET_MODE (subtarget) != operand_mode
6643 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6644 subtarget = 0;
6645
6646 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6647
6648 if (bitnum != 0)
6649 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6650 size_int (bitnum), target, 1);
6651
6652 if (GET_MODE (op0) != mode)
6653 op0 = convert_to_mode (mode, op0, 1);
6654
6655 if (bitnum != TYPE_PRECISION (type) - 1)
6656 op0 = expand_and (op0, const1_rtx, target);
6657
6658 if ((code == EQ && ! invert) || (code == NE && invert))
6659 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6660 OPTAB_LIB_WIDEN);
6661
6662 return op0;
6663 }
6664
6665 /* Now see if we are likely to be able to do this. Return if not. */
6666 if (! can_compare_p (operand_mode))
6667 return 0;
6668 icode = setcc_gen_code[(int) code];
6669 if (icode == CODE_FOR_nothing
6670 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6671 {
6672 /* We can only do this if it is one of the special cases that
6673 can be handled without an scc insn. */
6674 if ((code == LT && integer_zerop (arg1))
6675 || (! only_cheap && code == GE && integer_zerop (arg1)))
6676 ;
6677 else if (BRANCH_COST >= 0
6678 && ! only_cheap && (code == NE || code == EQ)
6679 && TREE_CODE (type) != REAL_TYPE
6680 && ((abs_optab->handlers[(int) operand_mode].insn_code
6681 != CODE_FOR_nothing)
6682 || (ffs_optab->handlers[(int) operand_mode].insn_code
6683 != CODE_FOR_nothing)))
6684 ;
6685 else
6686 return 0;
6687 }
6688
6689 preexpand_calls (exp);
6690 if (subtarget == 0 || GET_CODE (subtarget) != REG
6691 || GET_MODE (subtarget) != operand_mode
6692 || ! safe_from_p (subtarget, arg1))
6693 subtarget = 0;
6694
6695 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6696 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6697
6698 if (target == 0)
6699 target = gen_reg_rtx (mode);
6700
6701 result = emit_store_flag (target, code, op0, op1, operand_mode,
6702 unsignedp, 1);
6703
6704 if (result)
6705 {
6706 if (invert)
6707 result = expand_binop (mode, xor_optab, result, const1_rtx,
6708 result, 0, OPTAB_LIB_WIDEN);
6709 return result;
6710 }
6711
6712 /* If this failed, we have to do this with set/compare/jump/set code. */
6713 if (target == 0 || GET_CODE (target) != REG
6714 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6715 target = gen_reg_rtx (GET_MODE (target));
6716
6717 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6718 result = compare_from_rtx (op0, op1, code, unsignedp,
6719 operand_mode, NULL_RTX, 0);
6720 if (GET_CODE (result) == CONST_INT)
6721 return (((result == const0_rtx && ! invert)
6722 || (result != const0_rtx && invert))
6723 ? const0_rtx : const1_rtx);
6724
6725 label = gen_label_rtx ();
6726 if (bcc_gen_fctn[(int) code] == 0)
6727 abort ();
6728
6729 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6730 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6731 emit_label (label);
6732
6733 return target;
6734 }
6735 \f
6736 /* Generate a tablejump instruction (used for switch statements). */
6737
6738 #ifdef HAVE_tablejump
6739
6740 /* INDEX is the value being switched on, with the lowest value
6741 in the table already subtracted.
6742 MODE is its expected mode (needed if INDEX is constant).
6743 RANGE is the length of the jump table.
6744 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6745
6746 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6747 index value is out of range. */
6748
6749 void
6750 do_tablejump (index, mode, range, table_label, default_label)
6751 rtx index, range, table_label, default_label;
6752 enum machine_mode mode;
6753 {
6754 register rtx temp, vector;
6755
6756 /* Do an unsigned comparison (in the proper mode) between the index
6757 expression and the value which represents the length of the range.
6758 Since we just finished subtracting the lower bound of the range
6759 from the index expression, this comparison allows us to simultaneously
6760 check that the original index expression value is both greater than
6761 or equal to the minimum value of the range and less than or equal to
6762 the maximum value of the range. */
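/* Concretely: for a switch whose cases run from 3 to 7 the caller
   passes INDEX - 3, so a value below 3 wraps around to a huge
   unsigned number, and the single unsigned comparison below catches
   both index < 3 and index > 7 with one branch.  */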
6763
6764 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
6765 emit_jump_insn (gen_bltu (default_label));
6766
6767 /* If index is in range, it must fit in Pmode.
6768 Convert to Pmode so we can index with it. */
6769 if (mode != Pmode)
6770 index = convert_to_mode (Pmode, index, 1);
6771
6772 /* If flag_force_addr were to affect this address
6773 it could interfere with the tricky assumptions made
6774 about addresses that contain label-refs,
6775 which may be valid only very near the tablejump itself. */
6776 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6777 GET_MODE_SIZE, because this indicates how large insns are. The other
6778 uses should all be Pmode, because they are addresses. This code
6779 could fail if addresses and insns are not the same size. */
6780 index = memory_address_noforce
6781 (CASE_VECTOR_MODE,
6782 gen_rtx (PLUS, Pmode,
6783 gen_rtx (MULT, Pmode, index,
6784 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
6785 gen_rtx (LABEL_REF, Pmode, table_label)));
6786 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6787 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6788 RTX_UNCHANGING_P (vector) = 1;
6789 convert_move (temp, vector, 0);
6790
6791 emit_jump_insn (gen_tablejump (temp, table_label));
6792
6793 #ifndef CASE_VECTOR_PC_RELATIVE
6794 /* If we are generating PIC code or if the table is PC-relative, the
6795 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6796 if (! flag_pic)
6797 emit_barrier ();
6798 #endif
6799 }
6800
6801 #endif /* HAVE_tablejump */