/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, 0, 0, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
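
/* A minimal usage sketch (hypothetical RTL, for illustration only):
   suppose VAR is a pseudo register and we want the value of VAR++
   as it was before the increment.

     rtx q = enqueue_insn (var,
                           gen_rtx (SET, VOIDmode, var,
                                    gen_rtx (PLUS, GET_MODE (var),
                                             var, const1_rtx)));
     rtx old = protect_from_queue (q, 0);
     ... use OLD in instructions ...
     emit_queue ();

   Before emit_queue flushes the queue, protect_from_queue simply
   returns VAR; once the increment insn has been emitted, it returns
   a temporary that was loaded with the pre-increment value.  */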

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      if (from_mode == SFmode && to_mode == DFmode)
        libcall = extendsfdf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
        libcall = truncdfsf2_libfunc;
      else
        /* This conversion is not implemented yet.  There aren't any TFmode
           library calls.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, 0, lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                0, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, 0,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* For extension to a wider mode, we must use an explicit extend insn,
     converting through an intermediate mode if necessary.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
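
/* A minimal usage sketch (hypothetical registers, for illustration):
   widening a signed HImode pseudo into an SImode pseudo.

     rtx src = gen_reg_rtx (HImode);
     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 0);         0 means sign-extend

   With UNSIGNEDP nonzero, the same call zero-extends instead.  */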

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  x = protect_from_queue (x, 0);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
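
/* For illustration (hypothetical): narrowing an SImode register to
   QImode usually needs no new insns, since gen_lowpart can refer to
   the low byte in place.

     rtx r = gen_reg_rtx (SImode);
     rtx b = convert_to_mode (QImode, r, 1);   typically a SUBREG of R

   Widening, by contrast, falls through to convert_move and returns a
   fresh pseudo.  */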
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
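
/* Worked example (assuming a hypothetical target with MOVE_MAX == 4,
   all integer move insns available, and no alignment restrictions):
   for L == 10 and ALIGN == 4, the loop first uses SImode,
   10 / 4 = 2 insns with 2 bytes left over, then HImode,
   2 / 2 = 1 insn, so move_by_pieces_ninsns returns 3.  */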

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, -size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, -size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
          < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
        {
          rtx insn = gen_movstrqi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
        {
          rtx insn = gen_movstrhi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
        {
          rtx insn = gen_movstrsi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
        {
          rtx insn = gen_movstrdi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         size, Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         size, Pmode);
#endif
    }
}
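
/* A minimal usage sketch (DST_ADDR and SRC_ADDR are hypothetical
   address rtx's, for illustration): copying 32 bytes between two
   word-aligned BLKmode references.

     rtx dst = gen_rtx (MEM, BLKmode, dst_addr);
     rtx src = gen_rtx (MEM, BLKmode, src_addr);
     emit_block_move (dst, src,
                      gen_rtx (CONST_INT, VOIDmode, 32),
                      UNITS_PER_WORD);

   Whether this becomes scalar moves, a movstr insn, or a call to
   memcpy/bcopy depends on MOVE_RATIO and the target.  */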
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           gen_rtx (CONST_INT, VOIDmode, nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
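
/* For illustration (hypothetical): spreading a DImode value across two
   consecutive word registers starting at hard register 4.

     move_block_to_reg (4, x, 2, DImode);

   On a machine with a load_multiple pattern this may be a single insn;
   otherwise it is one word move per register.  */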

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            gen_rtx (CONST_INT, VOIDmode, nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#else
      emit_library_call (bzero_libfunc, 1,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
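
/* For illustration: clearing a 16-byte BLKmode object emits a call to
   memset (or bzero), while clearing a scalar-mode object is just a
   move of const0_rtx.  E.g. (ADDR hypothetical):

     clear_storage (gen_rtx (MEM, BLKmode, addr), 16);  */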

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      return last_insn;
    }
  else
    abort ();
}
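
/* A minimal usage sketch (for illustration): loading a constant into a
   fresh pseudo register.

     rtx r = gen_reg_rtx (SImode);
     emit_move_insn (r, gen_rtx (CONST_INT, VOIDmode, 42));

   The constant's VOIDmode is accepted; X supplies the mode.  */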
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab,
                             temp,
                             gen_rtx (CONST_INT, VOIDmode, extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
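
/* For illustration: on a machine where the stack grows downward,
   gen_push_operand yields (pre_dec (reg sp)), so a push of X in
   mode M is simply

     emit_move_insn (gen_rtx (MEM, M, gen_push_operand ()), x);

   which is how emit_push_insn below uses it.  */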

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = gen_rtx (CONST_INT, VOIDmode, INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     gen_rtx (CONST_INT, VOIDmode, used),
                                     0, 0, OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 1,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 1,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, 0, 0, align, 0, 0, 0, args_addr,
                          gen_rtx (CONST_INT, VOIDmode,
                                   args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
}
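
/* A minimal usage sketch (hypothetical, for illustration): pushing one
   word-sized scalar argument on a machine with real push insns, no
   partial registers, and no preallocated argument block.

     emit_push_insn (val, SImode, integer_type_node, 0,
                     UNITS_PER_WORD, 0, 0, 0, 0, const0_rtx);

   The BLKmode and partial-register paths above exist for aggregates
   and for arguments split between registers and the stack.  */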
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */

void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_to_mode (DFmode, val, 0), mode = DFmode;
#endif

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, 0);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, (tree)0, 1))
        abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, (tree)0, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, (tree)0, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, 0,
                           argvec[count].reg && argvec[count].partial == 0,
                           0, &args_size, &argvec[count].offset,
                           &argvec[count].size);

      if (argvec[count].size.var)
        abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || 1
#endif
          )
        args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
         clobbering something we already put there (this library call might
         be inside the evaluation of an argument to a function whose call
         requires the stack).  This will only occur when the library call
         has sufficient args to run out of argument registers.  Abort in
         this case; if this ever occurs, code must be added to save and
         restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
        abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }
  va_end (p);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
                            REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (gen_rtx (CONST_INT, VOIDmode, args_size.constant),
                         0, 0);
#endif

#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
        emit_push_insn (val, mode, 0, 0, 0, partial, reg, 0, argblock,
                        gen_rtx (CONST_INT, VOIDmode,
                                 argvec[argnum].offset.constant));
      NO_DEFER_POP;
    }

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      NO_DEFER_POP;
    }

  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, 0, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : 0,
               old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
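
/* This file itself uses emit_library_call; for example, emit_block_move
   above issues

     emit_library_call (memcpy_libfunc, 1,
                        VOIDmode, 3, XEXP (x, 0), Pmode,
                        XEXP (y, 0), Pmode, size, Pmode);

   i.e. FUN, NO_QUEUE, OUTMODE, NARGS, then NARGS (value, mode) pairs.  */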
1881\f
1882/* Expand an assignment that stores the value of FROM into TO.
1883 If WANT_VALUE is nonzero, return an rtx for the value of TO.
1884 (This may contain a QUEUED rtx.)
1885 Otherwise, the returned value is not meaningful.
1886
1887 SUGGEST_REG is no longer actually used.
1888 It used to mean, copy the value through a register
1889 and return that register, if that is possible.
1890 But now we do this if WANT_VALUE.
1891
1892 If the value stored is a constant, we return the constant. */
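
/* As an illustration: for a C assignment such as `s.f = x', where F is
   a bit-field, TO is a COMPONENT_REF and we take the first branch below;
   get_inner_reference locates F within S and store_field emits the
   bit-field store.  A simple `v = x' instead falls through to the
   store_expr call at the end of this function.  */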
1893
1894rtx
1895expand_assignment (to, from, want_value, suggest_reg)
1896 tree to, from;
1897 int want_value;
1898 int suggest_reg;
1899{
1900 register rtx to_rtx = 0;
1901 rtx result;
1902
1903 /* Don't crash if the lhs of the assignment was erroneous. */
1904
1905 if (TREE_CODE (to) == ERROR_MARK)
1906 return expand_expr (from, 0, VOIDmode, 0);
1907
1908 /* Assignment of a structure component needs special treatment
1909 if the structure component's rtx is not simply a MEM.
1910 Assignment of an array element at a constant index
1911 has the same problem. */
1912
1913 if (TREE_CODE (to) == COMPONENT_REF
1914 || TREE_CODE (to) == BIT_FIELD_REF
1915 || (TREE_CODE (to) == ARRAY_REF
1916 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
1917 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
1918 {
1919 enum machine_mode mode1;
1920 int bitsize;
1921 int bitpos;
 1922 tree offset;
 1923 int unsignedp;
 1924 int volatilep = 0;
 1925 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
 1926 &mode1, &unsignedp, &volatilep);
1927
1928 /* If we are going to use store_bit_field and extract_bit_field,
1929 make sure to_rtx will be safe for multiple use. */
1930
1931 if (mode1 == VOIDmode && want_value)
1932 tem = stabilize_reference (tem);
1933
1934 to_rtx = expand_expr (tem, 0, VOIDmode, 0);
1935 if (offset != 0)
1936 {
1937 rtx offset_rtx = expand_expr (offset, 0, VOIDmode, 0);
1938
1939 if (GET_CODE (to_rtx) != MEM)
1940 abort ();
1941 to_rtx = change_address (to_rtx, VOIDmode,
1942 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
1943 force_reg (Pmode, offset_rtx)));
1944 }
1945 if (volatilep)
1946 {
1947 if (GET_CODE (to_rtx) == MEM)
1948 MEM_VOLATILE_P (to_rtx) = 1;
1949#if 0 /* This was turned off because, when a field is volatile
1950 in an object which is not volatile, the object may be in a register,
1951 and then we would abort over here. */
1952 else
1953 abort ();
1954#endif
1955 }
1956
1957 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
1958 (want_value
1959 /* Spurious cast makes HPUX compiler happy. */
1960 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
1961 : VOIDmode),
1962 unsignedp,
1963 /* Required alignment of containing datum. */
1964 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
1965 int_size_in_bytes (TREE_TYPE (tem)));
1966 preserve_temp_slots (result);
1967 free_temp_slots ();
1968
1969 return result;
1970 }
1971
1972 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
1973 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
1974
1975 if (to_rtx == 0)
1976 to_rtx = expand_expr (to, 0, VOIDmode, 0);
1977
1978 /* In case we are returning the contents of an object which overlaps
1979 the place the value is being stored, use a safe function when copying
1980 a value through a pointer into a structure value return block. */
1981 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
1982 && current_function_returns_struct
1983 && !current_function_returns_pcc_struct)
1984 {
1985 rtx from_rtx = expand_expr (from, 0, VOIDmode, 0);
1986 rtx size = expr_size (from);
1987
1988#ifdef TARGET_MEM_FUNCTIONS
 1989 emit_library_call (memcpy_libfunc, 1,
1990 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
1991 XEXP (from_rtx, 0), Pmode,
1992 size, Pmode);
1993#else
 1994 emit_library_call (bcopy_libfunc, 1,
1995 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
1996 XEXP (to_rtx, 0), Pmode,
1997 size, Pmode);
1998#endif
1999
2000 preserve_temp_slots (to_rtx);
2001 free_temp_slots ();
2002 return to_rtx;
2003 }
2004
2005 /* Compute FROM and store the value in the rtx we got. */
2006
2007 result = store_expr (from, to_rtx, want_value);
2008 preserve_temp_slots (result);
2009 free_temp_slots ();
2010 return result;
2011}
2012
2013/* Generate code for computing expression EXP,
2014 and storing the value into TARGET.
2015 Returns TARGET or an equivalent value.
2016 TARGET may contain a QUEUED rtx.
2017
2018 If SUGGEST_REG is nonzero, copy the value through a register
2019 and return that register, if that is possible.
2020
2021 If the value stored is a constant, we return the constant. */
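
/* For example, for the C expression `x = (f (), y)' the COMPOUND_EXPR
   case below expands `f ()' only for its side effects and then stores
   `y'; for `x = p ? a : b' with X in BLKmode, each arm of the
   conditional stores directly into X rather than through a temporary.  */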
2022
2023rtx
2024store_expr (exp, target, suggest_reg)
2025 register tree exp;
2026 register rtx target;
2027 int suggest_reg;
2028{
2029 register rtx temp;
2030 int dont_return_target = 0;
2031
2032 if (TREE_CODE (exp) == COMPOUND_EXPR)
2033 {
2034 /* Perform first part of compound expression, then assign from second
2035 part. */
2036 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2037 emit_queue ();
2038 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2039 }
2040 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2041 {
 2042 /* For a conditional expression, get a safe form of the target. Then
2043 test the condition, doing the appropriate assignment on either
2044 side. This avoids the creation of unnecessary temporaries.
2045 For non-BLKmode, it is more efficient not to do this. */
2046
2047 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2048
2049 emit_queue ();
2050 target = protect_from_queue (target, 1);
2051
2052 NO_DEFER_POP;
2053 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2054 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2055 emit_queue ();
2056 emit_jump_insn (gen_jump (lab2));
2057 emit_barrier ();
2058 emit_label (lab1);
2059 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2060 emit_queue ();
2061 emit_label (lab2);
2062 OK_DEFER_POP;
2063 return target;
2064 }
2065 else if (suggest_reg && GET_CODE (target) == MEM
2066 && GET_MODE (target) != BLKmode)
2067 /* If target is in memory and caller wants value in a register instead,
2068 arrange that. Pass TARGET as target for expand_expr so that,
2069 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2070 We know expand_expr will not use the target in that case. */
2071 {
2072 temp = expand_expr (exp, cse_not_expected ? 0 : target,
2073 GET_MODE (target), 0);
2074 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2075 temp = copy_to_reg (temp);
2076 dont_return_target = 1;
2077 }
2078 else if (queued_subexp_p (target))
2079 /* If target contains a postincrement, it is not safe
2080 to use as the returned value. It would access the wrong
2081 place by the time the queued increment gets output.
2082 So copy the value through a temporary and use that temp
2083 as the result. */
2084 {
2085 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2086 {
2087 /* Expand EXP into a new pseudo. */
2088 temp = gen_reg_rtx (GET_MODE (target));
2089 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2090 }
2091 else
2092 temp = expand_expr (exp, 0, GET_MODE (target), 0);
2093 dont_return_target = 1;
2094 }
2095 else
2096 {
2097 temp = expand_expr (exp, target, GET_MODE (target), 0);
2098 /* DO return TARGET if it's a specified hardware register.
2099 expand_return relies on this. */
2100 if (!(target && GET_CODE (target) == REG
2101 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2102 && CONSTANT_P (temp))
2103 dont_return_target = 1;
2104 }
2105
2106 /* If value was not generated in the target, store it there.
 2107 Convert the value to TARGET's type first if necessary. */
2108
2109 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2110 {
2111 target = protect_from_queue (target, 1);
2112 if (GET_MODE (temp) != GET_MODE (target)
2113 && GET_MODE (temp) != VOIDmode)
2114 {
2115 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2116 if (dont_return_target)
2117 {
2118 /* In this case, we will return TEMP,
2119 so make sure it has the proper mode.
2120 But don't forget to store the value into TARGET. */
2121 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2122 emit_move_insn (target, temp);
2123 }
2124 else
2125 convert_move (target, temp, unsignedp);
2126 }
2127
2128 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2129 {
2130 /* Handle copying a string constant into an array.
2131 The string constant may be shorter than the array.
2132 So copy just the string's actual length, and clear the rest. */
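	  /* E.g. for `char buf[8] = "abc";' only the string's own four
	     bytes (including the terminating null) are copied, and the
	     remaining four bytes of BUF are cleared below.  */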
2133 rtx size;
2134
2135 /* Get the size of the data type of the string,
2136 which is actually the size of the target. */
2137 size = expr_size (exp);
2138 if (GET_CODE (size) == CONST_INT
2139 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2140 emit_block_move (target, temp, size,
2141 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2142 else
 2143 {
2144 /* Compute the size of the data to copy from the string. */
2145 tree copy_size
2146 = fold (build (MIN_EXPR, sizetype,
2147 size_binop (CEIL_DIV_EXPR,
2148 TYPE_SIZE (TREE_TYPE (exp)),
2149 size_int (BITS_PER_UNIT)),
2150 convert (sizetype,
2151 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2152 rtx copy_size_rtx = expand_expr (copy_size, 0, VOIDmode, 0);
2153 rtx label = 0;
2154
2155 /* Copy that much. */
2156 emit_block_move (target, temp, copy_size_rtx,
2157 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2158
2159 /* Figure out how much is left in TARGET
2160 that we have to clear. */
2161 if (GET_CODE (copy_size_rtx) == CONST_INT)
2162 {
2163 temp = plus_constant (XEXP (target, 0),
2164 TREE_STRING_LENGTH (exp));
2165 size = plus_constant (size,
2166 - TREE_STRING_LENGTH (exp));
2167 }
2168 else
2169 {
2170 enum machine_mode size_mode = Pmode;
2171
2172 temp = force_reg (Pmode, XEXP (target, 0));
2173 temp = expand_binop (size_mode, add_optab, temp,
2174 copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);
2175
2176 size = expand_binop (size_mode, sub_optab, size,
2177 copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);
2178
2179 emit_cmp_insn (size, const0_rtx, LT, 0,
2180 GET_MODE (size), 0, 0);
2181 label = gen_label_rtx ();
2182 emit_jump_insn (gen_blt (label));
2183 }
2184
2185 if (size != const0_rtx)
2186 {
 2187#ifdef TARGET_MEM_FUNCTIONS
2188 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2189 temp, Pmode, const0_rtx, Pmode, size, Pmode);
 2190#else
2191 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2192 temp, Pmode, size, Pmode);
 2193#endif
2194 }
2195 if (label)
2196 emit_label (label);
2197 }
2198 }
2199 else if (GET_MODE (temp) == BLKmode)
2200 emit_block_move (target, temp, expr_size (exp),
2201 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2202 else
2203 emit_move_insn (target, temp);
2204 }
2205 if (dont_return_target)
2206 return temp;
2207 return target;
2208}
2209\f
2210/* Store the value of constructor EXP into the rtx TARGET.
2211 TARGET is either a REG or a MEM. */
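
/* For instance, `struct { int a, b; } x = { 1, 2 };' becomes a pair of
   store_field calls, one per element; a union constructor instead starts
   with a CLOBBER marking the whole old value dead.  */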
2212
2213static void
2214store_constructor (exp, target)
2215 tree exp;
2216 rtx target;
2217{
2218 tree type = TREE_TYPE (exp);
2219
2220 /* We know our target cannot conflict, since safe_from_p has been called. */
2221#if 0
2222 /* Don't try copying piece by piece into a hard register
2223 since that is vulnerable to being clobbered by EXP.
2224 Instead, construct in a pseudo register and then copy it all. */
2225 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2226 {
2227 rtx temp = gen_reg_rtx (GET_MODE (target));
2228 store_constructor (exp, temp);
2229 emit_move_insn (target, temp);
2230 return;
2231 }
2232#endif
2233
 2234 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2235 {
2236 register tree elt;
2237
2238 /* Inform later passes that the whole union value is dead. */
2239 if (TREE_CODE (type) == UNION_TYPE)
 2240 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2241
2242 /* If we are building a static constructor into a register,
2243 set the initial value as zero so we can fold the value into
2244 a constant. */
2245 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2246 emit_move_insn (target, const0_rtx);
2247
2248 /* If the constructor has fewer fields than the structure,
2249 clear the whole structure first. */
2250 else if (list_length (CONSTRUCTOR_ELTS (exp))
2251 != list_length (TYPE_FIELDS (type)))
2252 clear_storage (target, int_size_in_bytes (type));
2253 else
2254 /* Inform later passes that the old value is dead. */
2255 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2256
2257 /* Store each element of the constructor into
2258 the corresponding field of TARGET. */
2259
2260 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2261 {
2262 register tree field = TREE_PURPOSE (elt);
2263 register enum machine_mode mode;
2264 int bitsize;
2265 int bitpos;
2266 int unsignedp;
2267
2268 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2269 unsignedp = TREE_UNSIGNED (field);
2270 mode = DECL_MODE (field);
2271 if (DECL_BIT_FIELD (field))
2272 mode = VOIDmode;
2273
2274 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2275 /* ??? This case remains to be written. */
2276 abort ();
2277
2278 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2279
2280 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2281 /* The alignment of TARGET is
2282 at least what its type requires. */
2283 VOIDmode, 0,
2284 TYPE_ALIGN (type) / BITS_PER_UNIT,
2285 int_size_in_bytes (type));
2286 }
2287 }
 2288 else if (TREE_CODE (type) == ARRAY_TYPE)
2289 {
2290 register tree elt;
2291 register int i;
 2292 tree domain = TYPE_DOMAIN (type);
2293 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2294 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
 2295 tree elttype = TREE_TYPE (type);
2296
2297 /* If the constructor has fewer fields than the structure,
 2298 clear the whole structure first. Similarly if this is a
 2299 static constructor of a non-BLKmode object. */
 2300
2301 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2302 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2303 clear_storage (target, maxelt - minelt + 1);
2304 else
2305 /* Inform later passes that the old value is dead. */
2306 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2307
2308 /* Store each element of the constructor into
2309 the corresponding element of TARGET, determined
2310 by counting the elements. */
2311 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2312 elt;
2313 elt = TREE_CHAIN (elt), i++)
2314 {
2315 register enum machine_mode mode;
2316 int bitsize;
2317 int bitpos;
2318 int unsignedp;
2319
2320 mode = TYPE_MODE (elttype);
2321 bitsize = GET_MODE_BITSIZE (mode);
2322 unsignedp = TREE_UNSIGNED (elttype);
2323
2324 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2325
2326 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2327 /* The alignment of TARGET is
2328 at least what its type requires. */
2329 VOIDmode, 0,
2330 TYPE_ALIGN (type) / BITS_PER_UNIT,
2331 int_size_in_bytes (type));
2332 }
2333 }
2334
2335 else
2336 abort ();
2337}
2338
2339/* Store the value of EXP (an expression tree)
2340 into a subfield of TARGET which has mode MODE and occupies
2341 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2342 If MODE is VOIDmode, it means that we are storing into a bit-field.
2343
2344 If VALUE_MODE is VOIDmode, return nothing in particular.
2345 UNSIGNEDP is not used in this case.
2346
2347 Otherwise, return an rtx for the value stored. This rtx
2348 has mode VALUE_MODE if that is convenient to do.
2349 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2350
2351 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2352 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
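
/* Illustration: storing into a 3-bit field that begins 8 bits into the
   containing word arrives here with BITSIZE == 3, BITPOS == 8 and
   MODE == VOIDmode, and takes the store_bit_field path below; an aligned
   field that has a machine mode (e.g. SImode) is instead stored through
   an ordinary MEM built by change_address.  */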
2353
2354static rtx
2355store_field (target, bitsize, bitpos, mode, exp, value_mode,
2356 unsignedp, align, total_size)
2357 rtx target;
2358 int bitsize, bitpos;
2359 enum machine_mode mode;
2360 tree exp;
2361 enum machine_mode value_mode;
2362 int unsignedp;
2363 int align;
2364 int total_size;
2365{
2366 int width_mask = 0;
2367
2368 if (bitsize < HOST_BITS_PER_INT)
2369 width_mask = (1 << bitsize) - 1;
2370
2371 /* If we are storing into an unaligned field of an aligned union that is
2372 in a register, we may have the mode of TARGET being an integer mode but
2373 MODE == BLKmode. In that case, get an aligned object whose size and
2374 alignment are the same as TARGET and store TARGET into it (we can avoid
2375 the store if the field being stored is the entire width of TARGET). Then
2376 call ourselves recursively to store the field into a BLKmode version of
2377 that object. Finally, load from the object into TARGET. This is not
2378 very efficient in general, but should only be slightly more expensive
2379 than the otherwise-required unaligned accesses. Perhaps this can be
2380 cleaned up later. */
2381
2382 if (mode == BLKmode
2383 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2384 {
2385 rtx object = assign_stack_temp (GET_MODE (target),
2386 GET_MODE_SIZE (GET_MODE (target)), 0);
2387 rtx blk_object = copy_rtx (object);
2388
2389 PUT_MODE (blk_object, BLKmode);
2390
2391 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2392 emit_move_insn (object, target);
2393
2394 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2395 align, total_size);
2396
2397 emit_move_insn (target, object);
2398
2399 return target;
2400 }
2401
2402 /* If the structure is in a register or if the component
2403 is a bit field, we cannot use addressing to access it.
2404 Use bit-field techniques or SUBREG to store in it. */
2405
2406 if (mode == VOIDmode || GET_CODE (target) == REG
2407 || GET_CODE (target) == SUBREG)
2408 {
2409 rtx temp = expand_expr (exp, 0, VOIDmode, 0);
2410 /* Store the value in the bitfield. */
2411 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2412 if (value_mode != VOIDmode)
2413 {
2414 /* The caller wants an rtx for the value. */
2415 /* If possible, avoid refetching from the bitfield itself. */
2416 if (width_mask != 0
2417 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2418 return expand_and (temp,
2419 gen_rtx (CONST_INT, VOIDmode, width_mask), 0);
2420 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2421 0, value_mode, 0, align, total_size);
2422 }
2423 return const0_rtx;
2424 }
2425 else
2426 {
2427 rtx addr = XEXP (target, 0);
2428 rtx to_rtx;
2429
2430 /* If a value is wanted, it must be the lhs;
2431 so make the address stable for multiple use. */
2432
2433 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2434 && ! CONSTANT_ADDRESS_P (addr)
2435 /* A frame-pointer reference is already stable. */
2436 && ! (GET_CODE (addr) == PLUS
2437 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2438 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2439 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2440 addr = copy_to_reg (addr);
2441
2442 /* Now build a reference to just the desired component. */
2443
2444 to_rtx = change_address (target, mode,
2445 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2446 MEM_IN_STRUCT_P (to_rtx) = 1;
2447
2448 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2449 }
2450}
2451\f
2452/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2453 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2454 ARRAY_REFs at constant positions and find the ultimate containing object,
2455 which we return.
2456
2457 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2458 bit position, and *PUNSIGNEDP to the signedness of the field.
2459 If the position of the field is variable, we store a tree
2460 giving the variable offset (in units) in *POFFSET.
2461 This offset is in addition to the bit position.
2462 If the position is not variable, we store 0 in *POFFSET.
2463
2464 If any of the extraction expressions is volatile,
2465 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2466
2467 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2468 is a mode that can be used to access the field. In that case, *PBITSIZE
2469 is redundant.
2470
2471 If the field describes a variable-sized object, *PMODE is set to
2472 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2473 this case, but the address of the object can be found. */
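
/* For example, for `x.f' where F is a 16-bit field placed 32 bits into
   X, this returns the tree for X and sets *PBITSIZE to 16, *PBITPOS to
   32 and *POFFSET to 0; a variable component position instead leaves
   its variable part in *POFFSET, measured in units.  */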
2474
2475tree
 2476get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2477 tree exp;
2478 int *pbitsize;
2479 int *pbitpos;
 2480 tree *poffset;
2481 enum machine_mode *pmode;
2482 int *punsignedp;
2483 int *pvolatilep;
2484{
2485 tree size_tree = 0;
2486 enum machine_mode mode = VOIDmode;
 2487 tree offset = 0;
2488
2489 if (TREE_CODE (exp) == COMPONENT_REF)
2490 {
2491 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2492 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2493 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2494 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2495 }
2496 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2497 {
2498 size_tree = TREE_OPERAND (exp, 1);
2499 *punsignedp = TREE_UNSIGNED (exp);
2500 }
2501 else
2502 {
2503 mode = TYPE_MODE (TREE_TYPE (exp));
2504 *pbitsize = GET_MODE_BITSIZE (mode);
2505 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2506 }
2507
2508 if (size_tree)
2509 {
2510 if (TREE_CODE (size_tree) != INTEGER_CST)
2511 mode = BLKmode, *pbitsize = -1;
2512 else
2513 *pbitsize = TREE_INT_CST_LOW (size_tree);
2514 }
2515
2516 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2517 and find the ultimate containing object. */
2518
2519 *pbitpos = 0;
2520
2521 while (1)
2522 {
 2523 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
 2524 {
2525 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2526 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2527 : TREE_OPERAND (exp, 2));
 2528
2529 if (TREE_CODE (pos) == PLUS_EXPR)
2530 {
2531 tree constant, var;
2532 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2533 {
2534 constant = TREE_OPERAND (pos, 0);
2535 var = TREE_OPERAND (pos, 1);
2536 }
2537 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2538 {
2539 constant = TREE_OPERAND (pos, 1);
2540 var = TREE_OPERAND (pos, 0);
2541 }
2542 else
2543 abort ();
2544 *pbitpos += TREE_INT_CST_LOW (constant);
2545 if (offset)
2546 offset = size_binop (PLUS_EXPR, offset,
2547 size_binop (FLOOR_DIV_EXPR, var,
2548 size_int (BITS_PER_UNIT)));
2549 else
2550 offset = size_binop (FLOOR_DIV_EXPR, var,
2551 size_int (BITS_PER_UNIT));
2552 }
2553 else if (TREE_CODE (pos) == INTEGER_CST)
2554 *pbitpos += TREE_INT_CST_LOW (pos);
2555 else
2556 {
2557 /* Assume here that the offset is a multiple of a unit.
2558 If not, there should be an explicitly added constant. */
2559 if (offset)
2560 offset = size_binop (PLUS_EXPR, offset,
2561 size_binop (FLOOR_DIV_EXPR, pos,
2562 size_int (BITS_PER_UNIT)));
2563 else
2564 offset = size_binop (FLOOR_DIV_EXPR, pos,
2565 size_int (BITS_PER_UNIT));
2566 }
 2567 }
 2568
2569 else if (TREE_CODE (exp) == ARRAY_REF
2570 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2571 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2572 {
2573 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2574 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2575 }
2576 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2577 && ! ((TREE_CODE (exp) == NOP_EXPR
2578 || TREE_CODE (exp) == CONVERT_EXPR)
2579 && (TYPE_MODE (TREE_TYPE (exp))
2580 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2581 break;
2582
2583 /* If any reference in the chain is volatile, the effect is volatile. */
2584 if (TREE_THIS_VOLATILE (exp))
2585 *pvolatilep = 1;
2586 exp = TREE_OPERAND (exp, 0);
2587 }
2588
2589 /* If this was a bit-field, see if there is a mode that allows direct
2590 access in case EXP is in memory. */
2591 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2592 {
2593 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2594 if (mode == BLKmode)
2595 mode = VOIDmode;
2596 }
2597
2598 *pmode = mode;
2599 *poffset = offset;
2600#if 0
2601 /* We aren't finished fixing the callers to really handle nonzero offset. */
2602 if (offset != 0)
2603 abort ();
2604#endif
2605
2606 return exp;
2607}
2608\f
2609/* Given an rtx VALUE that may contain additions and multiplications,
2610 return an equivalent value that just refers to a register or memory.
2611 This is done by generating instructions to perform the arithmetic
2612 and returning a pseudo-register containing the value. */
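
/* E.g. given (plus:SI (reg:SI 100) (const_int 4)), this emits an add
   and returns a pseudo register holding the sum; a VALUE that is
   already just a register or memory reference is returned unchanged.  */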
2613
2614rtx
2615force_operand (value, target)
2616 rtx value, target;
2617{
2618 register optab binoptab = 0;
2619 /* Use a temporary to force order of execution of calls to
2620 `force_operand'. */
2621 rtx tmp;
2622 register rtx op2;
2623 /* Use subtarget as the target for operand 0 of a binary operation. */
2624 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2625
2626 if (GET_CODE (value) == PLUS)
2627 binoptab = add_optab;
2628 else if (GET_CODE (value) == MINUS)
2629 binoptab = sub_optab;
2630 else if (GET_CODE (value) == MULT)
2631 {
2632 op2 = XEXP (value, 1);
2633 if (!CONSTANT_P (op2)
2634 && !(GET_CODE (op2) == REG && op2 != subtarget))
2635 subtarget = 0;
2636 tmp = force_operand (XEXP (value, 0), subtarget);
2637 return expand_mult (GET_MODE (value), tmp,
2638 force_operand (op2, 0),
2639 target, 0);
2640 }
2641
2642 if (binoptab)
2643 {
2644 op2 = XEXP (value, 1);
2645 if (!CONSTANT_P (op2)
2646 && !(GET_CODE (op2) == REG && op2 != subtarget))
2647 subtarget = 0;
2648 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2649 {
2650 binoptab = add_optab;
2651 op2 = negate_rtx (GET_MODE (value), op2);
2652 }
2653
2654 /* Check for an addition with OP2 a constant integer and our first
2655 operand a PLUS of a virtual register and something else. In that
2656 case, we want to emit the sum of the virtual register and the
2657 constant first and then add the other value. This allows virtual
2658 register instantiation to simply modify the constant rather than
2659 creating another one around this addition. */
2660 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2661 && GET_CODE (XEXP (value, 0)) == PLUS
2662 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2663 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2664 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2665 {
2666 rtx temp = expand_binop (GET_MODE (value), binoptab,
2667 XEXP (XEXP (value, 0), 0), op2,
2668 subtarget, 0, OPTAB_LIB_WIDEN);
2669 return expand_binop (GET_MODE (value), binoptab, temp,
2670 force_operand (XEXP (XEXP (value, 0), 1), 0),
2671 target, 0, OPTAB_LIB_WIDEN);
2672 }
2673
2674 tmp = force_operand (XEXP (value, 0), subtarget);
2675 return expand_binop (GET_MODE (value), binoptab, tmp,
2676 force_operand (op2, 0),
2677 target, 0, OPTAB_LIB_WIDEN);
 2678 /* We give UNSIGNEDP = 0 to expand_binop
2679 because the only operations we are expanding here are signed ones. */
2680 }
2681 return value;
2682}
2683\f
2684/* Subroutine of expand_expr:
2685 save the non-copied parts (LIST) of an expr (LHS), and return a list
2686 which can restore these values to their previous values,
2687 should something modify their storage. */
2688
2689static tree
2690save_noncopied_parts (lhs, list)
2691 tree lhs;
2692 tree list;
2693{
2694 tree tail;
2695 tree parts = 0;
2696
2697 for (tail = list; tail; tail = TREE_CHAIN (tail))
2698 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2699 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2700 else
2701 {
2702 tree part = TREE_VALUE (tail);
2703 tree part_type = TREE_TYPE (part);
2704 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part, 0);
2705 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2706 int_size_in_bytes (part_type), 0);
2707 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2708 target = change_address (target, TYPE_MODE (part_type), 0);
2709 parts = tree_cons (to_be_saved,
2710 build (RTL_EXPR, part_type, 0, (tree) target),
2711 parts);
2712 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2713 }
2714 return parts;
2715}
2716
2717/* Subroutine of expand_expr:
2718 record the non-copied parts (LIST) of an expr (LHS), and return a list
2719 which specifies the initial values of these parts. */
2720
2721static tree
2722init_noncopied_parts (lhs, list)
2723 tree lhs;
2724 tree list;
2725{
2726 tree tail;
2727 tree parts = 0;
2728
2729 for (tail = list; tail; tail = TREE_CHAIN (tail))
2730 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2731 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2732 else
2733 {
2734 tree part = TREE_VALUE (tail);
2735 tree part_type = TREE_TYPE (part);
2736 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part, 0);
2737 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2738 }
2739 return parts;
2740}
2741
2742/* Subroutine of expand_expr: return nonzero iff there is no way that
2743 EXP can reference X, which is being modified. */
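
/* E.g. if X is a pseudo register and EXP is a constant, the answer is 1;
   if EXP contains an INDIRECT_REF and X is in memory, the answer is 0,
   since the pointer might point into X.  */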
2744
2745static int
2746safe_from_p (x, exp)
2747 rtx x;
2748 tree exp;
2749{
2750 rtx exp_rtl = 0;
2751 int i, nops;
2752
2753 if (x == 0)
2754 return 1;
2755
 2756 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
2757 find the underlying pseudo. */
2758 if (GET_CODE (x) == SUBREG)
2759 {
2760 x = SUBREG_REG (x);
2761 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2762 return 0;
2763 }
2764
2765 /* If X is a location in the outgoing argument area, it is always safe. */
2766 if (GET_CODE (x) == MEM
2767 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2768 || (GET_CODE (XEXP (x, 0)) == PLUS
2769 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2770 return 1;
2771
2772 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2773 {
2774 case 'd':
2775 exp_rtl = DECL_RTL (exp);
2776 break;
2777
2778 case 'c':
2779 return 1;
2780
2781 case 'x':
2782 if (TREE_CODE (exp) == TREE_LIST)
2783 return (safe_from_p (x, TREE_VALUE (exp))
2784 && (TREE_CHAIN (exp) == 0
2785 || safe_from_p (x, TREE_CHAIN (exp))));
2786 else
2787 return 0;
2788
2789 case '1':
2790 return safe_from_p (x, TREE_OPERAND (exp, 0));
2791
2792 case '2':
2793 case '<':
2794 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2795 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2796
2797 case 'e':
2798 case 'r':
2799 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2800 the expression. If it is set, we conflict iff we are that rtx or
2801 both are in memory. Otherwise, we check all operands of the
2802 expression recursively. */
2803
2804 switch (TREE_CODE (exp))
2805 {
2806 case ADDR_EXPR:
2807 return staticp (TREE_OPERAND (exp, 0));
2808
2809 case INDIRECT_REF:
2810 if (GET_CODE (x) == MEM)
2811 return 0;
2812 break;
2813
2814 case CALL_EXPR:
2815 exp_rtl = CALL_EXPR_RTL (exp);
2816 if (exp_rtl == 0)
2817 {
2818 /* Assume that the call will clobber all hard registers and
2819 all of memory. */
2820 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2821 || GET_CODE (x) == MEM)
2822 return 0;
2823 }
2824
2825 break;
2826
2827 case RTL_EXPR:
2828 exp_rtl = RTL_EXPR_RTL (exp);
2829 if (exp_rtl == 0)
2830 /* We don't know what this can modify. */
2831 return 0;
2832
2833 break;
2834
2835 case WITH_CLEANUP_EXPR:
2836 exp_rtl = RTL_EXPR_RTL (exp);
2837 break;
2838
2839 case SAVE_EXPR:
2840 exp_rtl = SAVE_EXPR_RTL (exp);
2841 break;
2842
2843 case BIND_EXPR:
2844 /* The only operand we look at is operand 1. The rest aren't
2845 part of the expression. */
2846 return safe_from_p (x, TREE_OPERAND (exp, 1));
2847
2848 case METHOD_CALL_EXPR:
2849 /* This takes a rtx argument, but shouldn't appear here. */
2850 abort ();
2851 }
2852
2853 /* If we have an rtx, we do not need to scan our operands. */
2854 if (exp_rtl)
2855 break;
2856
2857 nops = tree_code_length[(int) TREE_CODE (exp)];
2858 for (i = 0; i < nops; i++)
2859 if (TREE_OPERAND (exp, i) != 0
2860 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
2861 return 0;
2862 }
2863
2864 /* If we have an rtl, find any enclosed object. Then see if we conflict
2865 with it. */
2866 if (exp_rtl)
2867 {
2868 if (GET_CODE (exp_rtl) == SUBREG)
2869 {
2870 exp_rtl = SUBREG_REG (exp_rtl);
2871 if (GET_CODE (exp_rtl) == REG
2872 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
2873 return 0;
2874 }
2875
2876 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
2877 are memory and EXP is not readonly. */
2878 return ! (rtx_equal_p (x, exp_rtl)
2879 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
2880 && ! TREE_READONLY (exp)));
2881 }
2882
2883 /* If we reach here, it is safe. */
2884 return 1;
2885}
2886
2887/* Subroutine of expand_expr: return nonzero iff EXP is an
2888 expression whose type is statically determinable. */
2889
2890static int
2891fixed_type_p (exp)
2892 tree exp;
2893{
2894 if (TREE_CODE (exp) == PARM_DECL
2895 || TREE_CODE (exp) == VAR_DECL
2896 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
2897 || TREE_CODE (exp) == COMPONENT_REF
2898 || TREE_CODE (exp) == ARRAY_REF)
2899 return 1;
2900 return 0;
2901}
2902\f
2903/* expand_expr: generate code for computing expression EXP.
2904 An rtx for the computed value is returned. The value is never null.
2905 In the case of a void EXP, const0_rtx is returned.
2906
2907 The value may be stored in TARGET if TARGET is nonzero.
2908 TARGET is just a suggestion; callers must assume that
2909 the rtx returned may not be the same as TARGET.
2910
2911 If TARGET is CONST0_RTX, it means that the value will be ignored.
2912
2913 If TMODE is not VOIDmode, it suggests generating the
2914 result in mode TMODE. But this is done only when convenient.
2915 Otherwise, TMODE is ignored and the value generated in its natural mode.
2916 TMODE is just a suggestion; callers must assume that
2917 the rtx returned may not have mode TMODE.
2918
2919 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
2920 with a constant address even if that address is not normally legitimate.
2921 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
2922
2923 If MODIFIER is EXPAND_SUM then when EXP is an addition
2924 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
2925 or a nest of (PLUS ...) and (MINUS ...) where the terms are
2926 products as above, or REG or MEM, or constant.
2927 Ordinarily in such cases we would output mul or add instructions
2928 and then return a pseudo reg containing the sum.
2929
2930 EXPAND_INITIALIZER is much like EXPAND_SUM except that
2931 it also marks a label as absolutely required (it can't be dead).
 2932 This is used for outputting expressions used in initializers. */
2933
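/* As a small example, expanding the C expression `a + 1' with
   MODIFIER == EXPAND_SUM may simply return (plus (reg) (const_int 1)),
   whereas with the ordinary modifier an add is emitted and a pseudo
   register holding the sum is returned.  */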
2934rtx
2935expand_expr (exp, target, tmode, modifier)
2936 register tree exp;
2937 rtx target;
2938 enum machine_mode tmode;
2939 enum expand_modifier modifier;
2940{
2941 register rtx op0, op1, temp;
2942 tree type = TREE_TYPE (exp);
2943 int unsignedp = TREE_UNSIGNED (type);
2944 register enum machine_mode mode = TYPE_MODE (type);
2945 register enum tree_code code = TREE_CODE (exp);
2946 optab this_optab;
2947 /* Use subtarget as the target for operand 0 of a binary operation. */
2948 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2949 rtx original_target = target;
2950 int ignore = target == const0_rtx;
2951 tree context;
2952
2953 /* Don't use hard regs as subtargets, because the combiner
2954 can only handle pseudo regs. */
2955 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
2956 subtarget = 0;
2957 /* Avoid subtargets inside loops,
2958 since they hide some invariant expressions. */
2959 if (preserve_subexpressions_p ())
2960 subtarget = 0;
2961
2962 if (ignore) target = 0, original_target = 0;
2963
 2964 /* If we will do cse, generate all results into pseudo registers
2965 since 1) that allows cse to find more things
2966 and 2) otherwise cse could produce an insn the machine
2967 cannot support. */
2968
2969 if (! cse_not_expected && mode != BLKmode && target
2970 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
2971 target = subtarget;
2972
2973 /* Ensure we reference a volatile object even if value is ignored. */
2974 if (ignore && TREE_THIS_VOLATILE (exp)
2975 && mode != VOIDmode && mode != BLKmode)
2976 {
2977 target = gen_reg_rtx (mode);
2978 temp = expand_expr (exp, target, VOIDmode, modifier);
2979 if (temp != target)
2980 emit_move_insn (target, temp);
2981 return target;
2982 }
2983
2984 switch (code)
2985 {
2986 case LABEL_DECL:
2987 {
2988 tree function = decl_function_context (exp);
2989 /* Handle using a label in a containing function. */
2990 if (function != current_function_decl && function != 0)
2991 {
2992 struct function *p = find_function_data (function);
2993 /* Allocate in the memory associated with the function
2994 that the label is in. */
2995 push_obstacks (p->function_obstack,
2996 p->function_maybepermanent_obstack);
2997
2998 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
2999 label_rtx (exp), p->forced_labels);
3000 pop_obstacks ();
3001 }
3002 else if (modifier == EXPAND_INITIALIZER)
3003 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3004 label_rtx (exp), forced_labels);
3005 return gen_rtx (MEM, FUNCTION_MODE,
3006 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3007 }
3008
3009 case PARM_DECL:
3010 if (DECL_RTL (exp) == 0)
3011 {
3012 error_with_decl (exp, "prior parameter's size depends on `%s'");
 3013 return CONST0_RTX (mode);
3014 }
3015
3016 case FUNCTION_DECL:
3017 case VAR_DECL:
3018 case RESULT_DECL:
3019 if (DECL_RTL (exp) == 0)
3020 abort ();
3021 /* Ensure variable marked as used
3022 even if it doesn't go through a parser. */
3023 TREE_USED (exp) = 1;
3024 /* Handle variables inherited from containing functions. */
3025 context = decl_function_context (exp);
3026
3027 /* We treat inline_function_decl as an alias for the current function
3028 because that is the inline function whose vars, types, etc.
3029 are being merged into the current function.
3030 See expand_inline_function. */
3031 if (context != 0 && context != current_function_decl
3032 && context != inline_function_decl
3033 /* If var is static, we don't need a static chain to access it. */
3034 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3035 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3036 {
3037 rtx addr;
3038
3039 /* Mark as non-local and addressable. */
3040 TREE_NONLOCAL (exp) = 1;
3041 mark_addressable (exp);
3042 if (GET_CODE (DECL_RTL (exp)) != MEM)
3043 abort ();
3044 addr = XEXP (DECL_RTL (exp), 0);
3045 if (GET_CODE (addr) == MEM)
3046 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3047 else
3048 addr = fix_lexical_addr (addr, exp);
3049 return change_address (DECL_RTL (exp), mode, addr);
3050 }
 3051
3052 /* This is the case of an array whose size is to be determined
3053 from its initializer, while the initializer is still being parsed.
3054 See expand_decl. */
3055 if (GET_CODE (DECL_RTL (exp)) == MEM
3056 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3057 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3058 XEXP (DECL_RTL (exp), 0));
3059 if (GET_CODE (DECL_RTL (exp)) == MEM
3060 && modifier != EXPAND_CONST_ADDRESS
3061 && modifier != EXPAND_SUM
3062 && modifier != EXPAND_INITIALIZER)
3063 {
3064 /* DECL_RTL probably contains a constant address.
3065 On RISC machines where a constant address isn't valid,
3066 make some insns to get that address into a register. */
3067 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3068 || (flag_force_addr
3069 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3070 return change_address (DECL_RTL (exp), VOIDmode,
3071 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3072 }
3073 return DECL_RTL (exp);
3074
3075 case INTEGER_CST:
3076 return immed_double_const (TREE_INT_CST_LOW (exp),
3077 TREE_INT_CST_HIGH (exp),
3078 mode);
3079
3080 case CONST_DECL:
3081 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3082
3083 case REAL_CST:
3084 /* If optimized, generate immediate CONST_DOUBLE
3085 which will be turned into memory by reload if necessary.
3086
3087 We used to force a register so that loop.c could see it. But
3088 this does not allow gen_* patterns to perform optimizations with
3089 the constants. It also produces two insns in cases like "x = 1.0;".
3090 On most machines, floating-point constants are not permitted in
3091 many insns, so we'd end up copying it to a register in any case.
3092
3093 Now, we do the copying in expand_binop, if appropriate. */
3094 return immed_real_const (exp);
3095
3096 case COMPLEX_CST:
3097 case STRING_CST:
3098 if (! TREE_CST_RTL (exp))
3099 output_constant_def (exp);
3100
3101 /* TREE_CST_RTL probably contains a constant address.
3102 On RISC machines where a constant address isn't valid,
3103 make some insns to get that address into a register. */
3104 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3105 && modifier != EXPAND_CONST_ADDRESS
3106 && modifier != EXPAND_INITIALIZER
3107 && modifier != EXPAND_SUM
3108 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3109 return change_address (TREE_CST_RTL (exp), VOIDmode,
3110 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3111 return TREE_CST_RTL (exp);
3112
3113 case SAVE_EXPR:
3114 context = decl_function_context (exp);
3115 /* We treat inline_function_decl as an alias for the current function
3116 because that is the inline function whose vars, types, etc.
3117 are being merged into the current function.
3118 See expand_inline_function. */
3119 if (context == current_function_decl || context == inline_function_decl)
3120 context = 0;
3121
3122 /* If this is non-local, handle it. */
3123 if (context)
3124 {
3125 temp = SAVE_EXPR_RTL (exp);
3126 if (temp && GET_CODE (temp) == REG)
3127 {
3128 put_var_into_stack (exp);
3129 temp = SAVE_EXPR_RTL (exp);
3130 }
3131 if (temp == 0 || GET_CODE (temp) != MEM)
3132 abort ();
3133 return change_address (temp, mode,
3134 fix_lexical_addr (XEXP (temp, 0), exp));
3135 }
3136 if (SAVE_EXPR_RTL (exp) == 0)
3137 {
3138 if (mode == BLKmode)
3139 temp
3140 = assign_stack_temp (mode,
3141 int_size_in_bytes (TREE_TYPE (exp)), 0);
3142 else
3143 temp = gen_reg_rtx (mode);
3144 SAVE_EXPR_RTL (exp) = temp;
3145 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3146 if (!optimize && GET_CODE (temp) == REG)
3147 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3148 save_expr_regs);
3149 }
3150 return SAVE_EXPR_RTL (exp);
3151
3152 case EXIT_EXPR:
3153 /* Exit the current loop if the body-expression is true. */
3154 {
3155 rtx label = gen_label_rtx ();
3156 do_jump (TREE_OPERAND (exp, 0), label, 0);
3157 expand_exit_loop (0);
3158 emit_label (label);
3159 }
3160 return const0_rtx;
3161
3162 case LOOP_EXPR:
3163 expand_start_loop (1);
3164 expand_expr_stmt (TREE_OPERAND (exp, 0));
3165 expand_end_loop ();
3166
3167 return const0_rtx;
3168
3169 case BIND_EXPR:
3170 {
3171 tree vars = TREE_OPERAND (exp, 0);
3172 int vars_need_expansion = 0;
3173
3174 /* Need to open a binding contour here because
 3175 if there are any cleanups, they must be contained here. */
3176 expand_start_bindings (0);
3177
3178 /* Mark the corresponding BLOCK for output. */
3179 if (TREE_OPERAND (exp, 2) != 0)
3180 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3181
3182 /* If VARS have not yet been expanded, expand them now. */
3183 while (vars)
3184 {
3185 if (DECL_RTL (vars) == 0)
3186 {
3187 vars_need_expansion = 1;
3188 expand_decl (vars);
3189 }
3190 expand_decl_init (vars);
3191 vars = TREE_CHAIN (vars);
3192 }
3193
3194 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3195
3196 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3197
3198 return temp;
3199 }
3200
3201 case RTL_EXPR:
3202 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3203 abort ();
3204 emit_insns (RTL_EXPR_SEQUENCE (exp));
3205 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3206 return RTL_EXPR_RTL (exp);
3207
3208 case CONSTRUCTOR:
3209 /* All elts simple constants => refer to a constant in memory. But
3210 if this is a non-BLKmode mode, let it store a field at a time
3211 since that should make a CONST_INT or CONST_DOUBLE when we
3212 fold. */
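      /* E.g. a static BLKmode aggregate like `{ 1, 2 }' is emitted once
	 as read-only data and referenced by address here, while a
	 constructor whose mode fits in a register is built up field by
	 field by store_constructor below.  */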
3213 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3214 {
3215 rtx constructor = output_constant_def (exp);
3216 if (modifier != EXPAND_CONST_ADDRESS
3217 && modifier != EXPAND_INITIALIZER
3218 && modifier != EXPAND_SUM
3219 && !memory_address_p (GET_MODE (constructor),
3220 XEXP (constructor, 0)))
3221 constructor = change_address (constructor, VOIDmode,
3222 XEXP (constructor, 0));
3223 return constructor;
3224 }
3225
3226 if (ignore)
3227 {
3228 tree elt;
3229 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3230 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3231 return const0_rtx;
3232 }
3233 else
3234 {
3235 if (target == 0 || ! safe_from_p (target, exp))
3236 {
3237 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3238 target = gen_reg_rtx (mode);
3239 else
3240 {
3241 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3242 if (target)
3243 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3244 target = safe_target;
3245 }
3246 }
3247 store_constructor (exp, target);
3248 return target;
3249 }
3250
3251 case INDIRECT_REF:
3252 {
3253 tree exp1 = TREE_OPERAND (exp, 0);
3254 tree exp2;
3255
3256 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3257 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3258 This code has the same general effect as simply doing
3259 expand_expr on the save expr, except that the expression PTR
3260 is computed for use as a memory address. This means different
3261 code, suitable for indexing, may be generated. */
3262 if (TREE_CODE (exp1) == SAVE_EXPR
3263 && SAVE_EXPR_RTL (exp1) == 0
3264 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3265 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3266 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3267 {
3268 temp = expand_expr (TREE_OPERAND (exp1, 0), 0, VOIDmode, EXPAND_SUM);
3269 op0 = memory_address (mode, temp);
3270 op0 = copy_all_regs (op0);
3271 SAVE_EXPR_RTL (exp1) = op0;
3272 }
3273 else
3274 {
3275 op0 = expand_expr (exp1, 0, VOIDmode, EXPAND_SUM);
3276 op0 = memory_address (mode, op0);
3277 }
3278
3279 temp = gen_rtx (MEM, mode, op0);
3280 /* If address was computed by addition,
3281 mark this as an element of an aggregate. */
3282 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3283 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3284 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3285 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3286 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3287 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3288 || (TREE_CODE (exp1) == ADDR_EXPR
3289 && (exp2 = TREE_OPERAND (exp1, 0))
3290 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3291 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3292 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3293 MEM_IN_STRUCT_P (temp) = 1;
3294 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
 3295#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3296 a location is accessed through a pointer to const does not mean
3297 that the value there can never change. */
 3298 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
 3299#endif
3300 return temp;
3301 }
3302
3303 case ARRAY_REF:
3304 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3305 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3306 {
3307 /* Nonconstant array index or nonconstant element size.
3308 Generate the tree for *(&array+index) and expand that,
3309 except do it in a language-independent way
3310 and don't complain about non-lvalue arrays.
3311 `mark_addressable' should already have been called
3312 for any array for which this case will be reached. */
3313
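	  /* In effect `a[i]' is rewritten here as
	     `*(&a + i * sizeof (a[0]))', built as language-independent
	     trees and then expanded recursively.  */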
3314 /* Don't forget the const or volatile flag from the array element. */
3315 tree variant_type = build_type_variant (type,
3316 TREE_READONLY (exp),
3317 TREE_THIS_VOLATILE (exp));
3318 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3319 TREE_OPERAND (exp, 0));
3320 tree index = TREE_OPERAND (exp, 1);
3321 tree elt;
3322
3323 /* Convert the integer argument to a type the same size as a pointer
3324 so the multiply won't overflow spuriously. */
3325 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3326 index = convert (type_for_size (POINTER_SIZE, 0), index);
3327
3328 /* Don't think the address has side effects
3329 just because the array does.
3330 (In some cases the address might have side effects,
3331 and we fail to record that fact here. However, it should not
3332 matter, since expand_expr should not care.) */
3333 TREE_SIDE_EFFECTS (array_adr) = 0;
3334
3335 elt = build1 (INDIRECT_REF, type,
3336 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3337 array_adr,
3338 fold (build (MULT_EXPR,
3339 TYPE_POINTER_TO (variant_type),
3340 index, size_in_bytes (type))))));
3341
3342 /* Volatility, etc., of new expression is same as old expression. */
3343 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3344 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3345 TREE_READONLY (elt) = TREE_READONLY (exp);
3346
3347 return expand_expr (elt, target, tmode, modifier);
3348 }
3349
3350 /* Fold an expression like: "foo"[2].
3351 This is not done in fold so it won't happen inside &. */
3352 {
3353 int i;
3354 tree arg0 = TREE_OPERAND (exp, 0);
3355 tree arg1 = TREE_OPERAND (exp, 1);
3356
3357 if (TREE_CODE (arg0) == STRING_CST
3358 && TREE_CODE (arg1) == INTEGER_CST
3359 && !TREE_INT_CST_HIGH (arg1)
3360 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3361 {
3362 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3363 {
3364 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3365 TREE_TYPE (exp) = integer_type_node;
3366 return expand_expr (exp, target, tmode, modifier);
3367 }
3368 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3369 {
3370 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3371 TREE_TYPE (exp) = integer_type_node;
3372 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3373 }
3374 }
3375 }
3376
3377 /* If this is a constant index into a constant array,
3378 just get the value from the array. Handle both the cases when
3379 we have an explicit constructor and when our operand is a variable
3380 that was declared const. */
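    /* E.g. given `static const int tbl[3] = {1, 2, 3};', a use of
       `tbl[1]' can (when optimizing) be expanded as the constant 2,
       taken straight from DECL_INITIAL with no memory reference.  */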
3381
3382 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3383 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3384 {
3385 tree index = fold (TREE_OPERAND (exp, 1));
3386 if (TREE_CODE (index) == INTEGER_CST
3387 && TREE_INT_CST_HIGH (index) == 0)
3388 {
3389 int i = TREE_INT_CST_LOW (index);
3390 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3391
3392 while (elem && i--)
3393 elem = TREE_CHAIN (elem);
3394 if (elem)
3395 return expand_expr (fold (TREE_VALUE (elem)), target,
3396 tmode, modifier);
3397 }
3398 }
3399
3400 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3401 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3402 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3403 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3404 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3405 && optimize >= 1
3406 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3407 != ERROR_MARK))
3408 {
3409 tree index = fold (TREE_OPERAND (exp, 1));
3410 if (TREE_CODE (index) == INTEGER_CST
3411 && TREE_INT_CST_HIGH (index) == 0)
3412 {
3413 int i = TREE_INT_CST_LOW (index);
 3414 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
 3415
3416 if (TREE_CODE (init) == CONSTRUCTOR)
3417 {
3418 tree elem = CONSTRUCTOR_ELTS (init);
3419
3420 while (elem && i--)
3421 elem = TREE_CHAIN (elem);
3422 if (elem)
3423 return expand_expr (fold (TREE_VALUE (elem)), target,
3424 tmode, modifier);
3425 }
3426 else if (TREE_CODE (init) == STRING_CST
3427 && i < TREE_STRING_LENGTH (init))
3428 {
3429 temp = gen_rtx (CONST_INT, VOIDmode,
3430 TREE_STRING_POINTER (init)[i]);
3431 return convert_to_mode (mode, temp, 0);
3432 }
3433 }
3434 }
3435 /* Treat array-ref with constant index as a component-ref. */
3436
3437 case COMPONENT_REF:
3438 case BIT_FIELD_REF:
3439 /* If the operand is a CONSTRUCTOR, we can just extract the
3440 appropriate field if it is present. */
3441 if (code != ARRAY_REF
3442 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3443 {
3444 tree elt;
3445
3446 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3447 elt = TREE_CHAIN (elt))
3448 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3449 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3450 }
3451
3452 {
3453 enum machine_mode mode1;
3454 int bitsize;
3455 int bitpos;
 3456 tree offset;
 3457 int volatilep = 0;
 3458 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3459 &mode1, &unsignedp, &volatilep);
3460
3461 /* In some cases, we will be offsetting OP0's address by a constant.
3462 So get it as a sum, if possible. If we will be using it
3463 directly in an insn, we validate it. */
3464 op0 = expand_expr (tem, 0, VOIDmode, EXPAND_SUM);
3465
3466 /* If this is a constant, put it into a register if it is a
 3467 legitimate constant and memory if it isn't. */
3468 if (CONSTANT_P (op0))
3469 {
3470 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3471 if (LEGITIMATE_CONSTANT_P (op0))
3472 op0 = force_reg (mode, op0);
3473 else
3474 op0 = validize_mem (force_const_mem (mode, op0));
3475 }
3476
3477 if (offset != 0)
3478 {
3479 rtx offset_rtx = expand_expr (offset, 0, VOIDmode, 0);
3480
3481 if (GET_CODE (op0) != MEM)
3482 abort ();
3483 op0 = change_address (op0, VOIDmode,
3484 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3485 force_reg (Pmode, offset_rtx)));
3486 }
3487
3488 /* Don't forget about volatility even if this is a bitfield. */
3489 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3490 {
3491 op0 = copy_rtx (op0);
3492 MEM_VOLATILE_P (op0) = 1;
3493 }
3494
3495 if (mode1 == VOIDmode
3496 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3497 {
3498 /* In cases where an aligned union has an unaligned object
3499 as a field, we might be extracting a BLKmode value from
3500 an integer-mode (e.g., SImode) object. Handle this case
3501 by doing the extract into an object as wide as the field
3502 (which we know to be the width of a basic mode), then
3503 storing into memory, and changing the mode to BLKmode. */
3504 enum machine_mode ext_mode = mode;
3505
3506 if (ext_mode == BLKmode)
3507 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3508
3509 if (ext_mode == BLKmode)
3510 abort ();
3511
3512 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3513 unsignedp, target, ext_mode, ext_mode,
3514 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3515 int_size_in_bytes (TREE_TYPE (tem)));
3516 if (mode == BLKmode)
3517 {
3518 rtx new = assign_stack_temp (ext_mode,
3519 bitsize / BITS_PER_UNIT, 0);
3520
3521 emit_move_insn (new, op0);
3522 op0 = copy_rtx (new);
3523 PUT_MODE (op0, BLKmode);
3524 }
3525
3526 return op0;
3527 }
3528
3529 /* Get a reference to just this component. */
3530 if (modifier == EXPAND_CONST_ADDRESS
3531 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3532 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3533 (bitpos / BITS_PER_UNIT)));
3534 else
3535 op0 = change_address (op0, mode1,
3536 plus_constant (XEXP (op0, 0),
3537 (bitpos / BITS_PER_UNIT)));
3538 MEM_IN_STRUCT_P (op0) = 1;
3539 MEM_VOLATILE_P (op0) |= volatilep;
3540 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3541 return op0;
3542 if (target == 0)
3543 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3544 convert_move (target, op0, unsignedp);
3545 return target;
3546 }
3547
3548 case OFFSET_REF:
3549 {
3550 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3551 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3552 op0 = expand_expr (addr, 0, VOIDmode, EXPAND_SUM);
3553 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3554 MEM_IN_STRUCT_P (temp) = 1;
3555 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3556#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3557 a location is accessed through a pointer to const does not mean
3558 that the value there can never change. */
3559 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3560#endif
3561 return temp;
3562 }
3563
3564 /* Intended for a reference to a buffer of a file-object in Pascal.
3565 But it's not certain that a special tree code will really be
3566 necessary for these. INDIRECT_REF might work for them. */
3567 case BUFFER_REF:
3568 abort ();
3569
3570 case WITH_CLEANUP_EXPR:
3571 if (RTL_EXPR_RTL (exp) == 0)
3572 {
3573 RTL_EXPR_RTL (exp)
3574 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3575 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2), cleanups_this_call);
3576 /* That's it for this cleanup. */
3577 TREE_OPERAND (exp, 2) = 0;
3578 }
3579 return RTL_EXPR_RTL (exp);
3580
3581 case CALL_EXPR:
3582 /* Check for a built-in function. */
3583 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3584 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3585 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3586 return expand_builtin (exp, target, subtarget, tmode, ignore);
3587 /* If this call was expanded already by preexpand_calls,
3588 just return the result we got. */
3589 if (CALL_EXPR_RTL (exp) != 0)
3590 return CALL_EXPR_RTL (exp);
3591 return expand_call (exp, target, ignore);
3592
3593 case NON_LVALUE_EXPR:
3594 case NOP_EXPR:
3595 case CONVERT_EXPR:
3596 case REFERENCE_EXPR:
3597 if (TREE_CODE (type) == VOID_TYPE || ignore)
3598 {
3599 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3600 return const0_rtx;
3601 }
3602 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3603 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3604 if (TREE_CODE (type) == UNION_TYPE)
3605 {
3606 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3607 if (target == 0)
3608 {
3609 if (mode == BLKmode)
3610 {
3611 if (TYPE_SIZE (type) == 0
3612 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3613 abort ();
3614 target = assign_stack_temp (BLKmode,
3615 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3616 + BITS_PER_UNIT - 1)
3617 / BITS_PER_UNIT, 0);
3618 }
3619 else
3620 target = gen_reg_rtx (mode);
3621 }
3622 if (GET_CODE (target) == MEM)
3623 /* Store data into beginning of memory target. */
3624 store_expr (TREE_OPERAND (exp, 0),
3625 change_address (target, TYPE_MODE (valtype), 0), 0);
3626 else if (GET_CODE (target) == REG)
3627 /* Store this field into a union of the proper type. */
3628 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3629 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3630 VOIDmode, 0, 1,
3631 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3632 else
3633 abort ();
3634
3635 /* Return the entire union. */
3636 return target;
3637 }
3638 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, 0);
3639 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3640 return op0;
3641 if (flag_force_mem && GET_CODE (op0) == MEM)
3642 op0 = copy_to_reg (op0);
3643
3644 if (target == 0)
3645 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3646 else
3647 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3648 return target;
3649
3650 case PLUS_EXPR:
3651 /* We come here from MINUS_EXPR when the second operand is a constant. */
3652 plus_expr:
3653 this_optab = add_optab;
3654
3655 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3656 something else, make sure we add the register to the constant and
3657 then to the other thing. This case can occur during strength
3658 reduction and doing it this way will produce better code if the
3659 frame pointer or argument pointer is eliminated.
3660
3661 fold-const.c will ensure that the constant is always in the inner
3662 PLUS_EXPR, so the only case we need to do anything about is if
3663 sp, ap, or fp is our second argument, in which case we must swap
3664 the innermost first argument and our second argument. */
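 /* E.g. (X + C) + FP is rearranged here into (FP + C) + X, so that
    the eliminable register stays next to the constant and the pair
    can later collapse into a single displacement.  */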
3665
3666 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3667 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3668 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3669 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3670 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3671 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3672 {
3673 tree t = TREE_OPERAND (exp, 1);
3674
3675 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3676 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3677 }
3678
3679 /* If the result is to be Pmode and we are adding an integer to
3680 something, we might be forming a constant. So try to use
3681 plus_constant. If it produces a sum and we can't accept it,
3682 use force_operand. This allows P = &ARR[const] to generate
3683 efficient code on machines where a SYMBOL_REF is not a valid
3684 address.
3685
3686 If this is an EXPAND_SUM call, always return the sum. */
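 /* E.g. P = &ARR[3] with 4-byte elements becomes
    (plus (symbol_ref ARR) (const_int 12)) via plus_constant;
    when the caller cannot accept such a sum, force_operand
    computes it into a register.  */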
3687 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3688 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3689 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3690 || mode == Pmode))
3691 {
3692 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3693 EXPAND_SUM);
3694 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3695 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3696 op1 = force_operand (op1, target);
3697 return op1;
3698 }
3699
3700 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3701 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3702 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3703 || mode == Pmode))
3704 {
3705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3706 EXPAND_SUM);
3707 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3708 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3709 op0 = force_operand (op0, target);
3710 return op0;
3711 }
3712
3713 /* No sense saving up arithmetic to be done
3714 if it's all in the wrong mode to form part of an address.
3715 And force_operand won't know whether to sign-extend or
3716 zero-extend. */
3717 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3718 || mode != Pmode) goto binop;
3719
3720 preexpand_calls (exp);
3721 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3722 subtarget = 0;
3723
3724 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3725 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3726
3727 /* Make sure any term that's a sum with a constant comes last. */
3728 if (GET_CODE (op0) == PLUS
3729 && CONSTANT_P (XEXP (op0, 1)))
3730 {
3731 temp = op0;
3732 op0 = op1;
3733 op1 = temp;
3734 }
3735 /* If adding to a sum including a constant,
3736 associate it to put the constant outside. */
3737 if (GET_CODE (op1) == PLUS
3738 && CONSTANT_P (XEXP (op1, 1)))
3739 {
3740 rtx constant_term = const0_rtx;
3741
3742 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3743 if (temp != 0)
3744 op0 = temp;
3745 /* Ensure that MULT comes first if there is one. */
3746 else if (GET_CODE (op0) == MULT)
3747 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3748 else
3749 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3750
3751 /* Let's also eliminate constants from op0 if possible. */
3752 op0 = eliminate_constant_term (op0, &constant_term);
3753
3754 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3755 their sum should be a constant. Form it into OP1, since the
3756 result we want will then be OP0 + OP1. */
3757
3758 temp = simplify_binary_operation (PLUS, mode, constant_term,
3759 XEXP (op1, 1));
3760 if (temp != 0)
3761 op1 = temp;
3762 else
3763 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3764 }
3765
3766 /* Put a constant term last and put a multiplication first. */
3767 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3768 temp = op1, op1 = op0, op0 = temp;
3769
3770 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3771 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3772
3773 case MINUS_EXPR:
3774 /* Handle difference of two symbolic constants,
3775 for the sake of an initializer. */
3776 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3777 && really_constant_p (TREE_OPERAND (exp, 0))
3778 && really_constant_p (TREE_OPERAND (exp, 1)))
3779 {
3780 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, modifier);
3781 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3782 return gen_rtx (MINUS, mode, op0, op1);
3783 }
3784 /* Convert A - const to A + (-const). */
3785 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3786 {
3787 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3788 fold (build1 (NEGATE_EXPR, type,
3789 TREE_OPERAND (exp, 1))));
3790 goto plus_expr;
3791 }
3792 this_optab = sub_optab;
3793 goto binop;
3794
3795 case MULT_EXPR:
3796 preexpand_calls (exp);
3797 /* If first operand is constant, swap them.
3798 Thus the following special case checks need only
3799 check the second operand. */
3800 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3801 {
3802 register tree t1 = TREE_OPERAND (exp, 0);
3803 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3804 TREE_OPERAND (exp, 1) = t1;
3805 }
3806
3807 /* Attempt to return something suitable for generating an
3808 indexed address, for machines that support that. */
3809
3810 if (modifier == EXPAND_SUM && mode == Pmode
3811 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3812 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
3813 {
3814 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3815
3816 /* Apply distributive law if OP0 is x+c. */
3817 if (GET_CODE (op0) == PLUS
3818 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3819 return gen_rtx (PLUS, mode,
3820 gen_rtx (MULT, mode, XEXP (op0, 0),
3821 gen_rtx (CONST_INT, VOIDmode,
3822 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3823 gen_rtx (CONST_INT, VOIDmode,
3824 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3825 * INTVAL (XEXP (op0, 1)))));
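 /* Thus (X + 4) * 3 comes back as (plus (mult X 3) (const_int 12)),
    a form that is still usable as part of an address.  */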
3826
3827 if (GET_CODE (op0) != REG)
3828 op0 = force_operand (op0, 0);
3829 if (GET_CODE (op0) != REG)
3830 op0 = copy_to_mode_reg (mode, op0);
3831
3832 return gen_rtx (MULT, mode, op0,
3833 gen_rtx (CONST_INT, VOIDmode,
3834 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3835 }
3836
3837 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3838 subtarget = 0;
3839
3840 /* Check for multiplying things that have been extended
3841 from a narrower type. If this machine supports multiplying
3842 in that narrower type with a result in the desired type,
3843 do it that way, and avoid the explicit type-conversion. */
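 /* An illustrative case, assuming the target has a widening
    HImode-to-SImode multiply:  for short A, B, the product
    (int) A * (int) B is done with smul_widen_optab on the HImode
    operands, instead of two extensions and an SImode multiply.  */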
3844 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3845 && TREE_CODE (type) == INTEGER_TYPE
3846 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3847 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
3848 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3849 && int_fits_type_p (TREE_OPERAND (exp, 1),
3850 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3851 /* Don't use a widening multiply if a shift will do. */
3852 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
3853 > HOST_BITS_PER_INT)
3854 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
3855 ||
3856 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
3857 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3858 ==
3859 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
3860 /* If both operands are extended, they must either both
3861 be zero-extended or both be sign-extended. */
3862 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3863 ==
3864 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
3865 {
3866 enum machine_mode innermode
3867 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
3868 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3869 ? umul_widen_optab : smul_widen_optab);
3870 if (mode == GET_MODE_WIDER_MODE (innermode)
3871 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3872 {
3873 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
3874 0, VOIDmode, 0);
3875 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3876 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3877 else
3878 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
3879 0, VOIDmode, 0);
3880 goto binop2;
3881 }
3882 }
3883 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3884 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3885 return expand_mult (mode, op0, op1, target, unsignedp);
3886
3887 case TRUNC_DIV_EXPR:
3888 case FLOOR_DIV_EXPR:
3889 case CEIL_DIV_EXPR:
3890 case ROUND_DIV_EXPR:
3891 case EXACT_DIV_EXPR:
3892 preexpand_calls (exp);
3893 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3894 subtarget = 0;
3895 /* Possible optimization: compute the dividend with EXPAND_SUM;
3896 then, if the divisor is constant, we can optimize the case
3897 where some terms of the dividend have coefficients divisible by it. */
3898 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3899 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3900 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
3901
3902 case RDIV_EXPR:
3903 this_optab = flodiv_optab;
3904 goto binop;
3905
3906 case TRUNC_MOD_EXPR:
3907 case FLOOR_MOD_EXPR:
3908 case CEIL_MOD_EXPR:
3909 case ROUND_MOD_EXPR:
3910 preexpand_calls (exp);
3911 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3912 subtarget = 0;
3913 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3914 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3915 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
3916
3917 case FIX_ROUND_EXPR:
3918 case FIX_FLOOR_EXPR:
3919 case FIX_CEIL_EXPR:
3920 abort (); /* Not used for C. */
3921
3922 case FIX_TRUNC_EXPR:
3923 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3924 if (target == 0)
3925 target = gen_reg_rtx (mode);
3926 expand_fix (target, op0, unsignedp);
3927 return target;
3928
3929 case FLOAT_EXPR:
3930 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3931 if (target == 0)
3932 target = gen_reg_rtx (mode);
3933 /* expand_float can't figure out what to do if FROM has VOIDmode.
3934 So give it the correct mode. With -O, cse will optimize this. */
3935 if (GET_MODE (op0) == VOIDmode)
3936 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
3937 op0);
3938 expand_float (target, op0,
3939 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3940 return target;
3941
3942 case NEGATE_EXPR:
3943 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3944 temp = expand_unop (mode, neg_optab, op0, target, 0);
3945 if (temp == 0)
3946 abort ();
3947 return temp;
3948
3949 case ABS_EXPR:
3950 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3951
3952 /* Unsigned abs is simply the operand. Testing here means we don't
3953 risk generating incorrect code below. */
3954 if (TREE_UNSIGNED (type))
3955 return op0;
3956
3957 /* First try to do it with a special abs instruction. */
3958 temp = expand_unop (mode, abs_optab, op0, target, 0);
3959 if (temp != 0)
3960 return temp;
3961
3962 /* If this machine has expensive jumps, we can do integer absolute
3963 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
3964 where W is the width of MODE. */
3965
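 /* E.g. with W == 32 and X == -5:  X >> 31 is -1, (-1 ^ -5) is 4,
    and 4 - (-1) is 5.  For X >= 0 the shift yields 0 and the whole
    expression reduces to X.  */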
3966 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
3967 {
3968 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
3969 size_int (GET_MODE_BITSIZE (mode) - 1),
3970 0, 0);
3971
3972 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
3973 OPTAB_LIB_WIDEN);
3974 if (temp != 0)
3975 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
3976 OPTAB_LIB_WIDEN);
3977
3978 if (temp != 0)
3979 return temp;
3980 }
3981
3982 /* If that does not win, use conditional jump and negate. */
3983 target = original_target;
3984 temp = gen_label_rtx ();
3985 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
3986 || (GET_CODE (target) == REG
3987 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3988 target = gen_reg_rtx (mode);
3989 emit_move_insn (target, op0);
3990 emit_cmp_insn (target,
3991 expand_expr (convert (type, integer_zero_node),
3992 0, VOIDmode, 0),
3993 GE, 0, mode, 0, 0);
3994 NO_DEFER_POP;
3995 emit_jump_insn (gen_bge (temp));
3996 op0 = expand_unop (mode, neg_optab, target, target, 0);
3997 if (op0 != target)
3998 emit_move_insn (target, op0);
3999 emit_label (temp);
4000 OK_DEFER_POP;
4001 return target;
4002
4003 case MAX_EXPR:
4004 case MIN_EXPR:
4005 target = original_target;
4006 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4007 || (GET_CODE (target) == REG
4008 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4009 target = gen_reg_rtx (mode);
4010 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4011 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4012
4013 /* First try to do it with a special MIN or MAX instruction.
4014 If that does not win, use a conditional jump to select the proper
4015 value. */
4016 this_optab = (TREE_UNSIGNED (type)
4017 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4018 : (code == MIN_EXPR ? smin_optab : smax_optab));
4019
4020 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4021 OPTAB_WIDEN);
4022 if (temp != 0)
4023 return temp;
4024
4025 if (target != op0)
4026 emit_move_insn (target, op0);
4027 op0 = gen_label_rtx ();
4028 if (code == MAX_EXPR)
4029 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4030 ? compare_from_rtx (target, op1, GEU, 1, mode, 0, 0)
4031 : compare_from_rtx (target, op1, GE, 0, mode, 0, 0));
4032 else
4033 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4034 ? compare_from_rtx (target, op1, LEU, 1, mode, 0, 0)
4035 : compare_from_rtx (target, op1, LE, 0, mode, 0, 0));
4036 if (temp == const0_rtx)
4037 emit_move_insn (target, op1);
4038 else if (temp != const_true_rtx)
4039 {
4040 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4041 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4042 else
4043 abort ();
4044 emit_move_insn (target, op1);
4045 }
4046 emit_label (op0);
4047 return target;
4048
4049/* ??? Can optimize when the operand of this is a bitwise operation,
4050 by using a different bitwise operation. */
4051 case BIT_NOT_EXPR:
4052 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4053 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4054 if (temp == 0)
4055 abort ();
4056 return temp;
4057
4058 case FFS_EXPR:
4059 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4060 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4061 if (temp == 0)
4062 abort ();
4063 return temp;
4064
4065/* ??? Can optimize bitwise operations with one arg constant.
4066 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4067 and (a bitwise1 b) bitwise2 b (etc)
4068 but that is probably not worthwhile. */
4069
4070/* BIT_AND_EXPR is for bitwise anding.
4071 TRUTH_AND_EXPR is for anding two boolean values
4072 when we want in all cases to compute both of them.
4073 In general it is fastest to do TRUTH_AND_EXPR by
4074 computing both operands as actual zero-or-1 values
4075 and then bitwise anding. In cases where there cannot
4076 be any side effects, better code would be made by
4077 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4078 but the question is how to recognize those cases. */
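/* E.g. with A and B each already reduced to 0 or 1, A TRUTH_AND B is
   simply their bitwise AND, which the binop code below computes.  */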
4079
4080 case TRUTH_AND_EXPR:
4081 case BIT_AND_EXPR:
4082 this_optab = and_optab;
4083 goto binop;
4084
4085/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4086 case TRUTH_OR_EXPR:
4087 case BIT_IOR_EXPR:
4088 this_optab = ior_optab;
4089 goto binop;
4090
4091 case BIT_XOR_EXPR:
4092 this_optab = xor_optab;
4093 goto binop;
4094
4095 case LSHIFT_EXPR:
4096 case RSHIFT_EXPR:
4097 case LROTATE_EXPR:
4098 case RROTATE_EXPR:
4099 preexpand_calls (exp);
4100 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4101 subtarget = 0;
4102 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4103 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4104 unsignedp);
4105
4106/* Could determine the answer when only additive constants differ.
4107 Also, the addition of one can be handled by changing the condition. */
4108 case LT_EXPR:
4109 case LE_EXPR:
4110 case GT_EXPR:
4111 case GE_EXPR:
4112 case EQ_EXPR:
4113 case NE_EXPR:
4114 preexpand_calls (exp);
4115 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4116 if (temp != 0)
4117 return temp;
4118 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4119 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4120 && original_target
4121 && GET_CODE (original_target) == REG
4122 && (GET_MODE (original_target)
4123 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4124 {
4125 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4126 if (temp != original_target)
4127 temp = copy_to_reg (temp);
4128 op1 = gen_label_rtx ();
4129 emit_cmp_insn (temp, const0_rtx, EQ, 0,
4130 GET_MODE (temp), unsignedp, 0);
4131 emit_jump_insn (gen_beq (op1));
4132 emit_move_insn (temp, const1_rtx);
4133 emit_label (op1);
4134 return temp;
4135 }
4136 /* If no set-flag instruction, must generate a conditional
4137 store into a temporary variable. Drop through
4138 and handle this like && and ||. */
4139
4140 case TRUTH_ANDIF_EXPR:
4141 case TRUTH_ORIF_EXPR:
4142 if (target == 0 || ! safe_from_p (target, exp)
4143 /* Make sure we don't have a hard reg (such as function's return
4144 value) live across basic blocks, if not optimizing. */
4145 || (!optimize && GET_CODE (target) == REG
4146 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4147 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4148 emit_clr_insn (target);
4149 op1 = gen_label_rtx ();
4150 jumpifnot (exp, op1);
4151 emit_0_to_1_insn (target);
4152 emit_label (op1);
4153 return target;
4154
4155 case TRUTH_NOT_EXPR:
4156 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4157 /* The parser is careful to generate TRUTH_NOT_EXPR
4158 only with operands that are always zero or one. */
4159 temp = expand_binop (mode, xor_optab, op0,
4160 gen_rtx (CONST_INT, mode, 1),
4161 target, 1, OPTAB_LIB_WIDEN);
4162 if (temp == 0)
4163 abort ();
4164 return temp;
4165
4166 case COMPOUND_EXPR:
4167 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4168 emit_queue ();
4169 return expand_expr (TREE_OPERAND (exp, 1),
4170 (ignore ? const0_rtx : target),
4171 VOIDmode, 0);
4172
4173 case COND_EXPR:
4174 {
4175 /* Note that COND_EXPRs whose type is a structure or union
4176 are required to be constructed to contain assignments to
4177 a temporary variable, so that we can evaluate them here
4178 for side effect only. If type is void, we must do likewise. */
4179
4180 /* If an arm of the branch requires a cleanup,
4181 only that cleanup is performed. */
4182
4183 tree singleton = 0;
4184 tree binary_op = 0, unary_op = 0;
4185 tree old_cleanups = cleanups_this_call;
4186 cleanups_this_call = 0;
4187
4188 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4189 convert it to our mode, if necessary. */
4190 if (integer_onep (TREE_OPERAND (exp, 1))
4191 && integer_zerop (TREE_OPERAND (exp, 2))
4192 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4193 {
4194 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4195 if (GET_MODE (op0) == mode)
4196 return op0;
4197 if (target == 0)
4198 target = gen_reg_rtx (mode);
4199 convert_move (target, op0, unsignedp);
4200 return target;
4201 }
4202
4203 /* If we are not to produce a result, we have no target. Otherwise,
4204 if a target was specified use it; it will not be used as an
4205 intermediate target unless it is safe. If no target, use a
4206 temporary. */
4207
4208 if (mode == VOIDmode || ignore)
4209 temp = 0;
4210 else if (original_target
4211 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4212 temp = original_target;
4213 else if (mode == BLKmode)
4214 {
4215 if (TYPE_SIZE (type) == 0
4216 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4217 abort ();
4218 temp = assign_stack_temp (BLKmode,
4219 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4220 + BITS_PER_UNIT - 1)
4221 / BITS_PER_UNIT, 0);
4222 }
4223 else
4224 temp = gen_reg_rtx (mode);
4225
4226 /* Check for X ? A + B : A. If we have this, we can copy
4227 A to the output and conditionally add B. Similarly for unary
4228 operations. Don't do this if X has side-effects because
4229 those side effects might affect A or B and the "?" operation is
4230 a sequence point in ANSI. (We test for side effects later.) */
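 /* A concrete instance:  FLAG ? COUNT + 1 : COUNT stores COUNT in
    the target unconditionally and adds 1 only when FLAG is true,
    rather than evaluating COUNT on both arms of a branch.  */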
4231
4232 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4233 && operand_equal_p (TREE_OPERAND (exp, 2),
4234 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4235 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4236 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4237 && operand_equal_p (TREE_OPERAND (exp, 1),
4238 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4239 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4240 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4241 && operand_equal_p (TREE_OPERAND (exp, 2),
4242 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4243 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4244 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4245 && operand_equal_p (TREE_OPERAND (exp, 1),
4246 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4247 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4248
4249 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4250 operation, do this as A + (X != 0). Similarly for other simple
4251 binary operators. */
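 /* E.g. X < Y ? A + 1 : A becomes A + (X < Y), where do_store_flag
    computes the comparison as a 0-or-1 value, so no branch is
    needed at all.  */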
4252 if (singleton && binary_op
4253 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4254 && (TREE_CODE (binary_op) == PLUS_EXPR
4255 || TREE_CODE (binary_op) == MINUS_EXPR
4256 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4257 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4258 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4259 && integer_onep (TREE_OPERAND (binary_op, 1))
4260 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4261 {
4262 rtx result;
4263 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4264 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4265 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4266 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4267 : and_optab);
4268
4269 /* If we had X ? A : A + 1, do this as A + (X == 0).
4270
4271 We have to invert the truth value here and then put it
4272 back later if do_store_flag fails. We cannot simply copy
4273 TREE_OPERAND (exp, 0) to another variable and modify that
4274 because invert_truthvalue can modify the tree pointed to
4275 by its argument. */
4276 if (singleton == TREE_OPERAND (exp, 1))
4277 TREE_OPERAND (exp, 0)
4278 = invert_truthvalue (TREE_OPERAND (exp, 0));
4279
4280 result = do_store_flag (TREE_OPERAND (exp, 0),
4281 safe_from_p (temp, singleton) ? temp : 0,
4282 mode, BRANCH_COST <= 1);
4283
4284 if (result)
4285 {
4286 op1 = expand_expr (singleton, 0, VOIDmode, 0);
4287 return expand_binop (mode, boptab, op1, result, temp,
4288 unsignedp, OPTAB_LIB_WIDEN);
4289 }
4290 else if (singleton == TREE_OPERAND (exp, 1))
4291 TREE_OPERAND (exp, 0)
4292 = invert_truthvalue (TREE_OPERAND (exp, 0));
4293 }
4294
4295 NO_DEFER_POP;
4296 op0 = gen_label_rtx ();
4297
4298 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4299 {
4300 if (temp != 0)
4301 {
4302 /* If the target conflicts with the other operand of the
4303 binary op, we can't use it. Also, we can't use the target
4304 if it is a hard register, because evaluating the condition
4305 might clobber it. */
4306 if ((binary_op
4307 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4308 || (GET_CODE (temp) == REG
4309 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4310 temp = gen_reg_rtx (mode);
4311 store_expr (singleton, temp, 0);
4312 }
4313 else
4314 expand_expr (singleton, ignore ? const1_rtx : 0, VOIDmode, 0);
4315 if (cleanups_this_call)
4316 {
4317 sorry ("aggregate value in COND_EXPR");
4318 cleanups_this_call = 0;
4319 }
4320 if (singleton == TREE_OPERAND (exp, 1))
4321 jumpif (TREE_OPERAND (exp, 0), op0);
4322 else
4323 jumpifnot (TREE_OPERAND (exp, 0), op0);
4324
4325 if (binary_op && temp == 0)
4326 /* Just touch the other operand. */
4327 expand_expr (TREE_OPERAND (binary_op, 1),
4328 ignore ? const0_rtx : 0, VOIDmode, 0);
4329 else if (binary_op)
4330 store_expr (build (TREE_CODE (binary_op), type,
4331 make_tree (type, temp),
4332 TREE_OPERAND (binary_op, 1)),
4333 temp, 0);
4334 else
4335 store_expr (build1 (TREE_CODE (unary_op), type,
4336 make_tree (type, temp)),
4337 temp, 0);
4338 op1 = op0;
4339 }
4340#if 0
4341 /* This is now done in jump.c and is better done there because it
4342 produces shorter register lifetimes. */
4343
4344 /* Check for both possibilities either constants or variables
4345 in registers (but not the same as the target!). If so, can
4346 save branches by assigning one, branching, and assigning the
4347 other. */
4348 else if (temp && GET_MODE (temp) != BLKmode
4349 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4350 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4351 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4352 && DECL_RTL (TREE_OPERAND (exp, 1))
4353 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4354 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4355 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4356 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4357 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4358 && DECL_RTL (TREE_OPERAND (exp, 2))
4359 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4360 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4361 {
4362 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4363 temp = gen_reg_rtx (mode);
4364 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4365 jumpifnot (TREE_OPERAND (exp, 0), op0);
4366 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4367 op1 = op0;
4368 }
4369#endif
4370 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4371 comparison operator. If we have one of these cases, set the
4372 output to A, branch on A (cse will merge these two references),
4373 then set the output to FOO. */
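 /* E.g. for X > 0 ? X : Y this stores X, branches on X > 0 (cse
    should merge the two references to X), and stores Y on the
    fall-through path.  */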
4374 else if (temp
4375 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4376 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4377 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4378 TREE_OPERAND (exp, 1), 0)
4379 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4380 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4381 {
4382 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4383 temp = gen_reg_rtx (mode);
4384 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4385 jumpif (TREE_OPERAND (exp, 0), op0);
4386 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4387 op1 = op0;
4388 }
4389 else if (temp
4390 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4391 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4392 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4393 TREE_OPERAND (exp, 2), 0)
4394 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4395 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4396 {
4397 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4398 temp = gen_reg_rtx (mode);
4399 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4400 jumpifnot (TREE_OPERAND (exp, 0), op0);
4401 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4402 op1 = op0;
4403 }
4404 else
4405 {
4406 op1 = gen_label_rtx ();
4407 jumpifnot (TREE_OPERAND (exp, 0), op0);
4408 if (temp != 0)
4409 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4410 else
4411 expand_expr (TREE_OPERAND (exp, 1), ignore ? const0_rtx : 0,
4412 VOIDmode, 0);
4413 if (cleanups_this_call)
4414 {
4415 sorry ("aggregate value in COND_EXPR");
4416 cleanups_this_call = 0;
4417 }
4418
4419 emit_queue ();
4420 emit_jump_insn (gen_jump (op1));
4421 emit_barrier ();
4422 emit_label (op0);
4423 if (temp != 0)
4424 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4425 else
4426 expand_expr (TREE_OPERAND (exp, 2), ignore ? const0_rtx : 0,
4427 VOIDmode, 0);
4428 }
4429
4430 if (cleanups_this_call)
4431 {
4432 sorry ("aggregate value in COND_EXPR");
4433 cleanups_this_call = 0;
4434 }
4435
4436 emit_queue ();
4437 emit_label (op1);
4438 OK_DEFER_POP;
4439 cleanups_this_call = old_cleanups;
4440 return temp;
4441 }
4442
4443 case TARGET_EXPR:
4444 {
4445 /* Something needs to be initialized, but we didn't know
4446 where that thing was when building the tree. For example,
4447 it could be the return value of a function, or a parameter
4448 to a function which is laid down on the stack, or a temporary
4449 variable which must be passed by reference.
4450
4451 We guarantee that the expression will either be constructed
4452 or copied into our original target. */
4453
4454 tree slot = TREE_OPERAND (exp, 0);
4455
4456 if (TREE_CODE (slot) != VAR_DECL)
4457 abort ();
4458
4459 if (target == 0)
4460 {
4461 if (DECL_RTL (slot) != 0)
4462 target = DECL_RTL (slot);
4463 else
4464 {
4465 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4466 /* All temp slots at this level must not conflict. */
4467 preserve_temp_slots (target);
4468 DECL_RTL (slot) = target;
4469 }
4470
4471#if 0
4472 /* Since SLOT is not known to the called function
4473 to belong to its stack frame, we must build an explicit
4474 cleanup. This case occurs when we must build up a reference
4475 to pass the reference as an argument. In this case,
4476 it is very likely that such a reference need not be
4477 built here. */
4478
4479 if (TREE_OPERAND (exp, 2) == 0)
4480 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4481 if (TREE_OPERAND (exp, 2))
4482 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2),
4483 cleanups_this_call);
4484#endif
4485 }
4486 else
4487 {
4488 /* This case does occur, when expanding a parameter which
4489 needs to be constructed on the stack. The target
4490 is the actual stack address that we want to initialize.
4491 The function we call will perform the cleanup in this case. */
4492
4493 DECL_RTL (slot) = target;
4494 }
4495
4496 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4497 }
4498
4499 case INIT_EXPR:
4500 {
4501 tree lhs = TREE_OPERAND (exp, 0);
4502 tree rhs = TREE_OPERAND (exp, 1);
4503 tree noncopied_parts = 0;
4504 tree lhs_type = TREE_TYPE (lhs);
4505
4506 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4507 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4508 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4509 TYPE_NONCOPIED_PARTS (lhs_type));
4510 while (noncopied_parts != 0)
4511 {
4512 expand_assignment (TREE_VALUE (noncopied_parts),
4513 TREE_PURPOSE (noncopied_parts), 0, 0);
4514 noncopied_parts = TREE_CHAIN (noncopied_parts);
4515 }
4516 return temp;
4517 }
4518
4519 case MODIFY_EXPR:
4520 {
4521 /* If lhs is complex, expand calls in rhs before computing it.
4522 That's so we don't compute a pointer and save it over a call.
4523 If lhs is simple, compute it first so we can give it as a
4524 target if the rhs is just a call. This avoids an extra temp and copy
4525 and that prevents a partial-subsumption which makes bad code.
4526 Actually we could treat component_ref's of vars like vars. */
4527
4528 tree lhs = TREE_OPERAND (exp, 0);
4529 tree rhs = TREE_OPERAND (exp, 1);
4530 tree noncopied_parts = 0;
4531 tree lhs_type = TREE_TYPE (lhs);
4532
4533 temp = 0;
4534
4535 if (TREE_CODE (lhs) != VAR_DECL
4536 && TREE_CODE (lhs) != RESULT_DECL
4537 && TREE_CODE (lhs) != PARM_DECL)
4538 preexpand_calls (exp);
4539
4540 /* Check for |= or &= of a bitfield of size one into another bitfield
4541 of size 1. In this case, (unless we need the result of the
4542 assignment) we can do this more efficiently with a
4543 test followed by an assignment, if necessary.
4544
4545 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4546 things change so we do, this code should be enhanced to
4547 support it. */
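 /* E.g. with 1-bit fields, S.A |= S.B becomes
      if (S.B) S.A = 1;
    and S.A &= S.B becomes the matching test of S.B storing 0,
    which the do_jump/expand_assignment pair below emits.  */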
4548 if (ignore
4549 && TREE_CODE (lhs) == COMPONENT_REF
4550 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4551 || TREE_CODE (rhs) == BIT_AND_EXPR)
4552 && TREE_OPERAND (rhs, 0) == lhs
4553 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4554 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4555 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4556 {
4557 rtx label = gen_label_rtx ();
4558
4559 do_jump (TREE_OPERAND (rhs, 1),
4560 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4561 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4562 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4563 (TREE_CODE (rhs) == BIT_IOR_EXPR
4564 ? integer_one_node
4565 : integer_zero_node)),
4566 0, 0);
4567 do_pending_stack_adjust ();
4568 emit_label (label);
4569 return const0_rtx;
4570 }
4571
4572 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4573 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4574 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4575 TYPE_NONCOPIED_PARTS (lhs_type));
4576
4577 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4578 while (noncopied_parts != 0)
4579 {
4580 expand_assignment (TREE_PURPOSE (noncopied_parts),
4581 TREE_VALUE (noncopied_parts), 0, 0);
4582 noncopied_parts = TREE_CHAIN (noncopied_parts);
4583 }
4584 return temp;
4585 }
4586
4587 case PREINCREMENT_EXPR:
4588 case PREDECREMENT_EXPR:
4589 return expand_increment (exp, 0);
4590
4591 case POSTINCREMENT_EXPR:
4592 case POSTDECREMENT_EXPR:
4593 /* Faster to treat as pre-increment if result is not used. */
4594 return expand_increment (exp, ! ignore);
4595
4596 case ADDR_EXPR:
4597 /* Are we taking the address of a nested function? */
4598 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4599 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4600 {
4601 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4602 op0 = force_operand (op0, target);
4603 }
4604 else
4605 {
4606 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode,
4607 (modifier == EXPAND_INITIALIZER
4608 ? modifier : EXPAND_CONST_ADDRESS));
4609 if (GET_CODE (op0) != MEM)
4610 abort ();
4611
4612 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4613 return XEXP (op0, 0);
4614 op0 = force_operand (XEXP (op0, 0), target);
4615 }
4616 if (flag_force_addr && GET_CODE (op0) != REG)
4617 return force_reg (Pmode, op0);
4618 return op0;
4619
4620 case ENTRY_VALUE_EXPR:
4621 abort ();
4622
4623 case ERROR_MARK:
4624 return const0_rtx;
4625
4626 default:
4627 return (*lang_expand_expr) (exp, target, tmode, modifier);
4628 }
4629
4630 /* Here to do an ordinary binary operator, generating an instruction
4631 from the optab already placed in `this_optab'. */
4632 binop:
4633 preexpand_calls (exp);
4634 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4635 subtarget = 0;
4636 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4637 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4638 binop2:
4639 temp = expand_binop (mode, this_optab, op0, op1, target,
4640 unsignedp, OPTAB_LIB_WIDEN);
4641 if (temp == 0)
4642 abort ();
4643 return temp;
4644}
4645\f
4646/* Return the alignment in bits of EXP, a pointer valued expression.
4647 But don't return more than MAX_ALIGN no matter what.
4648 The alignment returned is, by default, the alignment of the thing that
4649 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4650
4651 Otherwise, look at the expression to see if we can do better, i.e., if the
4652 expression is actually pointing at an object whose alignment is tighter. */
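/* E.g. given (char *) &I for an int I, the pointer type promises only
   byte alignment, but looking through the cast and the ADDR_EXPR finds
   DECL_ALIGN (I), normally the alignment of int.  */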
4653
4654static int
4655get_pointer_alignment (exp, max_align)
4656 tree exp;
4657 unsigned max_align;
4658{
4659 unsigned align, inner;
4660
4661 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4662 return 0;
4663
4664 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4665 align = MIN (align, max_align);
4666
4667 while (1)
4668 {
4669 switch (TREE_CODE (exp))
4670 {
4671 case NOP_EXPR:
4672 case CONVERT_EXPR:
4673 case NON_LVALUE_EXPR:
4674 exp = TREE_OPERAND (exp, 0);
4675 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4676 return align;
4677 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4678 inner = MIN (inner, max_align);
4679 align = MAX (align, inner);
4680 break;
4681
4682 case PLUS_EXPR:
4683 /* If sum of pointer + int, restrict our maximum alignment to that
4684 imposed by the integer. If not, we can't do any better than
4685 ALIGN. */
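 /* E.g. for P + 2 with MAX_ALIGN == 32:  2 * BITS_PER_UNIT is 16,
    so MAX_ALIGN is halved once to 16, the strictest alignment the
    byte offset still guarantees.  */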
4686 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4687 return align;
4688
4689 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4690 & (max_align - 1))
4691 != 0)
4692 max_align >>= 1;
4693
4694 exp = TREE_OPERAND (exp, 0);
4695 break;
4696
4697 case ADDR_EXPR:
4698 /* See what we are pointing at and look at its alignment. */
4699 exp = TREE_OPERAND (exp, 0);
4700 if (TREE_CODE (exp) == FUNCTION_DECL)
4701 align = MAX (align, FUNCTION_BOUNDARY);
4702 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4703 align = MAX (align, DECL_ALIGN (exp));
4704#ifdef CONSTANT_ALIGNMENT
4705 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4706 align = CONSTANT_ALIGNMENT (exp, align);
4707#endif
4708 return MIN (align, max_align);
4709
4710 default:
4711 return align;
4712 }
4713 }
4714}
4715\f
4716/* Return the tree node and offset if a given argument corresponds to
4717 a string constant. */
4718
4719static tree
4720string_constant (arg, ptr_offset)
4721 tree arg;
4722 tree *ptr_offset;
4723{
4724 STRIP_NOPS (arg);
4725
4726 if (TREE_CODE (arg) == ADDR_EXPR
4727 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4728 {
4729 *ptr_offset = integer_zero_node;
4730 return TREE_OPERAND (arg, 0);
4731 }
4732 else if (TREE_CODE (arg) == PLUS_EXPR)
4733 {
4734 tree arg0 = TREE_OPERAND (arg, 0);
4735 tree arg1 = TREE_OPERAND (arg, 1);
4736
4737 STRIP_NOPS (arg0);
4738 STRIP_NOPS (arg1);
4739
4740 if (TREE_CODE (arg0) == ADDR_EXPR
4741 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4742 {
4743 *ptr_offset = arg1;
4744 return TREE_OPERAND (arg0, 0);
4745 }
4746 else if (TREE_CODE (arg1) == ADDR_EXPR
4747 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4748 {
4749 *ptr_offset = arg0;
4750 return TREE_OPERAND (arg1, 0);
4751 }
4752 }
4753
4754 return 0;
4755}
4756
4757/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4758 way, because the string could contain a zero byte in the middle.
4759 TREE_STRING_LENGTH is the size of the character array, not the string.
4760
4761 Unfortunately, string_constant can't access the values of const char
4762 arrays with initializers, so neither can we do so here. */
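/* E.g. c_strlen of "foobar" + 2 folds to size_int (4); for an unknown
   offset into a string with no embedded null, the code below folds to
   TREE_STRING_LENGTH minus the offset instead.  */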
4763
4764static tree
4765c_strlen (src)
4766 tree src;
4767{
4768 tree offset_node;
4769 int offset, max;
4770 char *ptr;
4771
4772 src = string_constant (src, &offset_node);
4773 if (src == 0)
4774 return 0;
4775 max = TREE_STRING_LENGTH (src);
4776 ptr = TREE_STRING_POINTER (src);
4777 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4778 {
4779 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4780 compute the offset to the following null if we don't know where to
4781 start searching for it. */
4782 int i;
4783 for (i = 0; i < max; i++)
4784 if (ptr[i] == 0)
4785 return 0;
4786 /* We don't know the starting offset, but we do know that the string
4787 has no internal zero bytes. We can assume that the offset falls
4788 within the bounds of the string; otherwise, the programmer deserves
4789 what he gets. Subtract the offset from the length of the string,
4790 and return that. */
4791 /* This would perhaps not be valid if we were dealing with named
4792 arrays in addition to literal string constants. */
4793 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4794 }
4795
4796 /* We have a known offset into the string. Start searching there for
4797 a null character. */
4798 if (offset_node == 0)
4799 offset = 0;
4800 else
4801 {
4802 /* Did we get a long long offset? If so, punt. */
4803 if (TREE_INT_CST_HIGH (offset_node) != 0)
4804 return 0;
4805 offset = TREE_INT_CST_LOW (offset_node);
4806 }
4807 /* If the offset is known to be out of bounds, warn, and call strlen at
4808 runtime. */
4809 if (offset < 0 || offset > max)
4810 {
4811 warning ("offset outside bounds of constant string");
4812 return 0;
4813 }
4814 /* Use strlen to search for the first zero byte. Since any strings
4815 constructed with build_string will have nulls appended, we win even
4816 if we get handed something like (char[4])"abcd".
4817
4818 Since OFFSET is our starting index into the string, no further
4819 calculation is needed. */
4820 return size_int (strlen (ptr + offset));
4821}
4822\f
4823/* Expand an expression EXP that calls a built-in function,
4824 with result going to TARGET if that's convenient
4825 (and in mode MODE if that's convenient).
4826 SUBTARGET may be used as the target for computing one of EXP's operands.
4827 IGNORE is nonzero if the value is to be ignored. */
4828
4829static rtx
4830expand_builtin (exp, target, subtarget, mode, ignore)
4831 tree exp;
4832 rtx target;
4833 rtx subtarget;
4834 enum machine_mode mode;
4835 int ignore;
4836{
4837 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4838 tree arglist = TREE_OPERAND (exp, 1);
4839 rtx op0;
4840 rtx lab1, lab2, insns;
4841 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4842
4843 switch (DECL_FUNCTION_CODE (fndecl))
4844 {
4845 case BUILT_IN_ABS:
4846 case BUILT_IN_LABS:
4847 case BUILT_IN_FABS:
4848 /* build_function_call changes these into ABS_EXPR. */
4849 abort ();
4850
4851 case BUILT_IN_FSQRT:
4852 /* If not optimizing, call the library function. */
4853 if (! optimize)
4854 break;
4855
4856 if (arglist == 0
4857 /* Arg could be non-integer if user redeclared this fcn wrong. */
4858 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4859 return const0_rtx;
4860
4861 /* Compute the argument. */
4862 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4863
4864 /* Make a suitable register to place result in. */
4865 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4866
4867 /* Test the argument to make sure it is in the proper domain for
4868 the sqrt function. If it is not in the domain, branch to a
4869 library call. */
4870 start_sequence ();
4871 lab1 = gen_label_rtx ();
4872 lab2 = gen_label_rtx ();
4873
4874 /* By default check the arguments. If flag_fast_math is turned on,
4875 then assume sqrt will always be called with valid arguments.
4876 Note changing the test below from "> 0" to ">= 0" would cause
4877 incorrect results when computing sqrt(-0.0). */
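 /* (IEEE requires sqrt (-0.0) to be -0.0, so the "> 0" test
    deliberately routes -0.0 to the library call rather than the
    open-coded path.)  */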
4878
4879 if (! flag_fast_math)
4880 {
4881 /* By checking op > 0 we are able to catch all of the
4882 IEEE special cases with a single if conditional. */
4883 emit_cmp_insn (op0, CONST0_RTX (GET_MODE (op0)), GT, 0,
4884 GET_MODE (op0), 0, 0);
4885 emit_jump_insn (gen_bgt (lab1));
4886
4887 /* The argument was not in the domain; do this via library call.
4888 Pop the arguments right away in case the call gets deleted. */
4889 NO_DEFER_POP;
4890 expand_call (exp, target, 0);
4891 OK_DEFER_POP;
4892
4893 /* Branch around the open-coded version. */
4894 emit_jump_insn (gen_jump (lab2));
4895 }
4896
4897 emit_label (lab1);
4898 /* Arg is in the domain; compute sqrt into TARGET.
4899 Set TARGET to wherever the result comes back. */
4900 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4901 sqrt_optab, op0, target, 0);
4902
4903 /* If we were unable to expand via the builtin, stop the
4904 sequence (without outputting the insns) and break, causing
4905 a call to the library function. */
4906 if (target == 0)
4907 {
4908 end_sequence ();
4909 break;
4910 }
4911 emit_label (lab2);
4912
4913
4914 /* Output the entire sequence. */
4915 insns = get_insns ();
4916 end_sequence ();
4917 emit_insns (insns);
4918
4919 return target;
4920
4921 case BUILT_IN_SAVEREGS:
4922 /* Don't do __builtin_saveregs more than once in a function.
4923 Save the result of the first call and reuse it. */
4924 if (saveregs_value != 0)
4925 return saveregs_value;
4926 {
4927 /* When this function is called, it means that registers must be
4928 saved on entry to this function. So we migrate the
4929 call to the first insn of this function. */
4930 rtx temp;
4931 rtx seq;
4932 rtx valreg, saved_valreg;
4933
4934 /* Now really call the function. `expand_call' does not call
4935 expand_builtin, so there is no danger of infinite recursion here. */
4936 start_sequence ();
4937
4938#ifdef EXPAND_BUILTIN_SAVEREGS
4939 /* Do whatever the machine needs done in this case. */
4940 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
4941#else
4942 /* The register where the function returns its value
4943 is likely to have something else in it, such as an argument.
4944 So preserve that register around the call. */
4945 if (value_mode != VOIDmode)
4946 {
4947 valreg = hard_libcall_value (value_mode);
4948 saved_valreg = gen_reg_rtx (value_mode);
4949 emit_move_insn (saved_valreg, valreg);
4950 }
4951
4952 /* Generate the call, putting the value in a pseudo. */
4953 temp = expand_call (exp, target, ignore);
4954
4955 if (value_mode != VOIDmode)
4956 emit_move_insn (valreg, saved_valreg);
4957#endif
4958
4959 seq = get_insns ();
4960 end_sequence ();
4961
4962 saveregs_value = temp;
4963
4964 /* This won't work inside a SEQUENCE--it really has to be
4965 at the start of the function. */
4966 if (in_sequence_p ())
4967 {
4968 /* Better to do this than to crash. */
4969 error ("`va_start' used within `({...})'");
4970 return temp;
4971 }
4972
4973 /* Put the sequence after the NOTE that starts the function. */
4974 emit_insns_before (seq, NEXT_INSN (get_insns ()));
4975 return temp;
4976 }
4977
4978 /* __builtin_args_info (N) returns word N of the arg space info
4979 for the current function. The number and meanings of the words
4980 are controlled by the definition of CUMULATIVE_ARGS. */
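 /* Usage sketch:  __builtin_args_info (0) returns the first int of
    current_function_args_info; the argument must be a compile-time
    constant index less than NWORDS.  */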
4981 case BUILT_IN_ARGS_INFO:
4982 {
4983 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4984 int i;
4985 int *word_ptr = (int *) &current_function_args_info;
4986 tree type, elts, result;
4987
4988 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
4989 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
4990 __FILE__, __LINE__);
4991
4992 if (arglist != 0)
4993 {
4994 tree arg = TREE_VALUE (arglist);
4995 if (TREE_CODE (arg) != INTEGER_CST)
4996 error ("argument of __builtin_args_info must be constant");
4997 else
4998 {
4999 int wordnum = TREE_INT_CST_LOW (arg);
5000
5001 if (wordnum < 0 || wordnum >= nwords)
5002 error ("argument of __builtin_args_info out of range");
5003 else
5004 return gen_rtx (CONST_INT, VOIDmode, word_ptr[wordnum]);
5005 }
5006 }
5007 else
5008 error ("missing argument in __builtin_args_info");
5009
5010 return const0_rtx;
5011
5012#if 0
5013 for (i = 0; i < nwords; i++)
5014 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5015
5016 type = build_array_type (integer_type_node,
5017 build_index_type (build_int_2 (nwords, 0)));
5018 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5019 TREE_CONSTANT (result) = 1;
5020 TREE_STATIC (result) = 1;
5021 result = build (INDIRECT_REF, build_pointer_type (type), result);
5022 TREE_CONSTANT (result) = 1;
5023 return expand_expr (result, 0, VOIDmode, 0);
5024#endif
5025 }
5026
5027 /* Return the address of the first anonymous stack arg. */
5028 case BUILT_IN_NEXT_ARG:
5029 {
5030 tree fntype = TREE_TYPE (current_function_decl);
5031 if (!(TYPE_ARG_TYPES (fntype) != 0
5032 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5033 != void_type_node)))
5034 {
5035 error ("`va_start' used in function with fixed args");
5036 return const0_rtx;
5037 }
5038 }
5039
5040 return expand_binop (Pmode, add_optab,
5041 current_function_internal_arg_pointer,
5042 current_function_arg_offset_rtx,
5043 0, 0, OPTAB_LIB_WIDEN);
5044
5045 case BUILT_IN_CLASSIFY_TYPE:
5046 if (arglist != 0)
5047 {
5048 tree type = TREE_TYPE (TREE_VALUE (arglist));
5049 enum tree_code code = TREE_CODE (type);
5050 if (code == VOID_TYPE)
5051 return gen_rtx (CONST_INT, VOIDmode, void_type_class);
5052 if (code == INTEGER_TYPE)
5053 return gen_rtx (CONST_INT, VOIDmode, integer_type_class);
5054 if (code == CHAR_TYPE)
5055 return gen_rtx (CONST_INT, VOIDmode, char_type_class);
5056 if (code == ENUMERAL_TYPE)
5057 return gen_rtx (CONST_INT, VOIDmode, enumeral_type_class);
5058 if (code == BOOLEAN_TYPE)
5059 return gen_rtx (CONST_INT, VOIDmode, boolean_type_class);
5060 if (code == POINTER_TYPE)
5061 return gen_rtx (CONST_INT, VOIDmode, pointer_type_class);
5062 if (code == REFERENCE_TYPE)
5063 return gen_rtx (CONST_INT, VOIDmode, reference_type_class);
5064 if (code == OFFSET_TYPE)
5065 return gen_rtx (CONST_INT, VOIDmode, offset_type_class);
5066 if (code == REAL_TYPE)
5067 return gen_rtx (CONST_INT, VOIDmode, real_type_class);
5068 if (code == COMPLEX_TYPE)
5069 return gen_rtx (CONST_INT, VOIDmode, complex_type_class);
5070 if (code == FUNCTION_TYPE)
5071 return gen_rtx (CONST_INT, VOIDmode, function_type_class);
5072 if (code == METHOD_TYPE)
5073 return gen_rtx (CONST_INT, VOIDmode, method_type_class);
5074 if (code == RECORD_TYPE)
5075 return gen_rtx (CONST_INT, VOIDmode, record_type_class);
5076 if (code == UNION_TYPE)
5077 return gen_rtx (CONST_INT, VOIDmode, union_type_class);
5078 if (code == ARRAY_TYPE)
5079 return gen_rtx (CONST_INT, VOIDmode, array_type_class);
5080 if (code == STRING_TYPE)
5081 return gen_rtx (CONST_INT, VOIDmode, string_type_class);
5082 if (code == SET_TYPE)
5083 return gen_rtx (CONST_INT, VOIDmode, set_type_class);
5084 if (code == FILE_TYPE)
5085 return gen_rtx (CONST_INT, VOIDmode, file_type_class);
5086 if (code == LANG_TYPE)
5087 return gen_rtx (CONST_INT, VOIDmode, lang_type_class);
5088 }
5089 return gen_rtx (CONST_INT, VOIDmode, no_type_class);
5090
5091 case BUILT_IN_CONSTANT_P:
5092 if (arglist == 0)
5093 return const0_rtx;
5094 else
5095 return (TREE_CODE_CLASS (TREE_VALUE (arglist)) == 'c'
5096 ? const1_rtx : const0_rtx);
5097
5098 case BUILT_IN_FRAME_ADDRESS:
5099 /* The argument must be a nonnegative integer constant.
5100 It counts the number of frames to scan up the stack.
5101 The value is the address of that frame. */
5102 case BUILT_IN_RETURN_ADDRESS:
5103 /* The argument must be a nonnegative integer constant.
5104 It counts the number of frames to scan up the stack.
5105 The value is the return address saved in that frame. */
5106 if (arglist == 0)
5107 /* Warning about missing arg was already issued. */
5108 return const0_rtx;
5109 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5110 {
5111 error ("invalid arg to __builtin_return_address");
5112 return const0_rtx;
5113 }
5114 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5115 {
5116 error ("invalid arg to __builtin_return_address");
5117 return const0_rtx;
5118 }
5119 else
5120 {
5121 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5122 rtx tem = frame_pointer_rtx;
5123 int i;
5124
5125 /* Scan back COUNT frames to the specified frame. */
5126 for (i = 0; i < count; i++)
5127 {
5128 /* Assume the dynamic chain pointer is in the word that
5129 the frame address points to, unless otherwise specified. */
5130#ifdef DYNAMIC_CHAIN_ADDRESS
5131 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5132#endif
5133 tem = memory_address (Pmode, tem);
5134 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5135 }
5136
5137 /* For __builtin_frame_address, return what we've got. */
5138 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5139 return tem;
5140
5141 /* For __builtin_return_address,
5142 get the return address from that frame. */
5143#ifdef RETURN_ADDR_RTX
5144 return RETURN_ADDR_RTX (count, tem);
5145#else
5146 tem = memory_address (Pmode,
5147 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5148 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5149#endif
5150 }
5151
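/* Illustration (a sketch added in editing): each loop iteration above
   follows one link of the dynamic chain, so

	void *fp2 = __builtin_frame_address (2);
	void *ra1 = __builtin_return_address (1);

   cost two chained loads for FP2, while RA1 takes one chained load plus,
   absent a RETURN_ADDR_RTX definition, a final load one Pmode word past
   the frame address.  */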
5152 case BUILT_IN_ALLOCA:
5153 if (arglist == 0
5154 /* Arg could be non-integer if user redeclared this fcn wrong. */
5155 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5156 return const0_rtx;
5157 current_function_calls_alloca = 1;
5158 /* Compute the argument. */
5159 op0 = expand_expr (TREE_VALUE (arglist), 0, VOIDmode, 0);
5160
5161 /* Allocate the desired space. */
5162 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5163
5164 /* Record the new stack level for nonlocal gotos. */
5165 if (nonlocal_goto_handler_slot != 0)
5166 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
5167 return target;
5168
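/* Note added in editing: a source-level call such as

	char *buf = __builtin_alloca (n);

   thus becomes an in-line stack adjustment; no library alloca is called,
   and the emit_stack_save above records the new stack level so that a
   nonlocal goto back into this function restores the right stack.  */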
5169 case BUILT_IN_FFS:
5170 /* If not optimizing, call the library function. */
5171 if (!optimize)
5172 break;
5173
5174 if (arglist == 0
5175 /* Arg could be non-integer if user redeclared this fcn wrong. */
5176 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5177 return const0_rtx;
5178
5179 /* Compute the argument. */
5180 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5181 /* Compute ffs, into TARGET if possible.
5182 Set TARGET to wherever the result comes back. */
5183 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5184 ffs_optab, op0, target, 1);
5185 if (target == 0)
5186 abort ();
5187 return target;
5188
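/* Example added in editing: __builtin_ffs returns one plus the index of
   the least significant set bit, or zero for a zero argument, so
   __builtin_ffs (8) is 4.  When optimizing and an ffs_optab insn exists,
   the code above emits that insn directly; otherwise the case breaks out
   to an ordinary library call.  */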
5189 case BUILT_IN_STRLEN:
5190 /* If not optimizing, call the library function. */
5191 if (!optimize)
5192 break;
5193
5194 if (arglist == 0
5195 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5196 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5197 return const0_rtx;
5198 else
5199 {
5200 tree src = TREE_VALUE (arglist);
5201 tree len = c_strlen (src);
5202
5203 int align
5204 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5205
5206 rtx result, src_rtx, char_rtx;
5207 enum machine_mode insn_mode = value_mode, char_mode;
5208 enum insn_code icode;
5209
5210 /* If the length is known, just return it. */
5211 if (len != 0)
5212 return expand_expr (len, target, mode, 0);
5213
5214 /* If SRC is not a pointer type, don't do this operation inline. */
5215 if (align == 0)
5216 break;
5217
5218 /* Call a function if we can't compute strlen in the right mode. */
5219
5220 while (insn_mode != VOIDmode)
5221 {
5222 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5223 if (icode != CODE_FOR_nothing)
5224 break;
5225
5226 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5227 }
5228 if (insn_mode == VOIDmode)
5229 break;
5230
5231 /* Make a place to write the result of the instruction. */
5232 result = target;
5233 if (! (result != 0
5234 && GET_CODE (result) == REG
5235 && GET_MODE (result) == insn_mode
5236 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5237 result = gen_reg_rtx (insn_mode);
5238
5239 /* Make sure the operands are acceptable to the predicates. */
5240
5241 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5242 result = gen_reg_rtx (insn_mode);
5243
5244 src_rtx = memory_address (BLKmode,
5245 expand_expr (src, 0, Pmode,
5246 EXPAND_NORMAL));
5247 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5248 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5249
5250 char_rtx = const0_rtx;
5251 char_mode = insn_operand_mode[(int)icode][2];
5252 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5253 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5254
5255 emit_insn (GEN_FCN (icode) (result,
5256 gen_rtx (MEM, BLKmode, src_rtx),
5257 char_rtx,
5258 gen_rtx (CONST_INT, VOIDmode, align)));
5259
5260 /* Return the value in the proper mode for this function. */
5261 if (GET_MODE (result) == value_mode)
5262 return result;
5263 else if (target != 0)
5264 {
5265 convert_move (target, result, 0);
5266 return target;
5267 }
5268 else
5269 return convert_to_mode (value_mode, result, 0);
5270 }
5271
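/* Example added in editing: with a constant argument the c_strlen path
   above folds the call entirely, so strlen ("hello") becomes the
   constant 5 with no code emitted.  Otherwise a strlen_optab insn is
   used when one exists in a wide enough mode; its operands are the
   string, the NUL terminator constant, and the known alignment, which
   can let a pattern safely scan more than one byte at a time.  */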
5272 case BUILT_IN_STRCPY:
5273 /* If not optimizing, call the library function. */
5274 if (!optimize)
5275 break;
5276
5277 if (arglist == 0
5278 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5279 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5280 || TREE_CHAIN (arglist) == 0
5281 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5282 return const0_rtx;
5283 else
5284 {
5285 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5286
5287 if (len == 0)
5288 break;
5289
5290 len = size_binop (PLUS_EXPR, len, integer_one_node);
5291
5292 chainon (arglist, build_tree_list (0, len));
5293 }
5294
5295 /* Falls through. */
5296 case BUILT_IN_MEMCPY:
5297 /* If not optimizing, call the library function. */
5298 if (!optimize)
5299 break;
5300
5301 if (arglist == 0
5302 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5303 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5304 || TREE_CHAIN (arglist) == 0
5305 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5306 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5307 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5308 return const0_rtx;
5309 else
5310 {
5311 tree dest = TREE_VALUE (arglist);
5312 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5313 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5314
5315 int src_align
5316 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5317 int dest_align
5318 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5319 rtx dest_rtx;
5320
5321 /* If either SRC or DEST is not a pointer type, don't do
5322 this operation in-line. */
5323 if (src_align == 0 || dest_align == 0)
5324 {
5325 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5326 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5327 break;
5328 }
5329
5330 dest_rtx = expand_expr (dest, 0, Pmode, EXPAND_NORMAL);
5331
5332 /* Copy word part most expediently. */
5333 emit_block_move (gen_rtx (MEM, BLKmode,
5334 memory_address (BLKmode, dest_rtx)),
5335 gen_rtx (MEM, BLKmode,
5336 memory_address (BLKmode,
5337 expand_expr (src, 0, Pmode,
5338 EXPAND_NORMAL))),
5339 expand_expr (len, 0, VOIDmode, 0),
5340 MIN (src_align, dest_align));
5341 return dest_rtx;
5342 }
5343
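/* Note added in editing: the strcpy case above rewrites

	strcpy (dst, "abc");

   by appending the constant length 4 (the 3 characters plus the
   terminator) to the argument list and falling through, so strcpy with
   a known-length source and memcpy share the same emit_block_move path;
   an unknown source length falls back to the library strcpy.  */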
5344/* These comparison functions need an instruction that returns an actual
5345 index. An ordinary compare that just sets the condition codes
5346 is not enough. */
5347#ifdef HAVE_cmpstrsi
5348 case BUILT_IN_STRCMP:
5349 /* If not optimizing, call the library function. */
5350 if (!optimize)
5351 break;
5352
5353 if (arglist == 0
5354 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5355 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5356 || TREE_CHAIN (arglist) == 0
5357 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5358 return const0_rtx;
5359 else if (!HAVE_cmpstrsi)
5360 break;
5361 {
5362 tree arg1 = TREE_VALUE (arglist);
5363 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5364 tree offset;
5365 tree len, len2;
5366
5367 len = c_strlen (arg1);
5368 if (len)
5369 len = size_binop (PLUS_EXPR, integer_one_node, len);
5370 len2 = c_strlen (arg2);
5371 if (len2)
5372 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5373
5374 /* If we don't have a constant length for the first, use the length
5375 of the second, if we know it. We don't require a constant for
5376 this case; some cost analysis could be done if both are available
5377 but neither is constant. For now, assume they're equally cheap.
5378
5379 If both strings have constant lengths, use the smaller. This
5380 could arise if optimization results in strcmp being called with
5381 two fixed strings, or if the code was machine-generated. We should
5382 add some code to the `memcmp' handler below to deal with such
5383 situations, someday. */
5384 if (!len || TREE_CODE (len) != INTEGER_CST)
5385 {
5386 if (len2)
5387 len = len2;
5388 else if (len == 0)
5389 break;
5390 }
5391 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5392 {
5393 if (tree_int_cst_lt (len2, len))
5394 len = len2;
5395 }
5396
5397 chainon (arglist, build_tree_list (0, len));
5398 }
5399
5400 /* Falls through. */
5401 case BUILT_IN_MEMCMP:
5402 /* If not optimizing, call the library function. */
5403 if (!optimize)
5404 break;
5405
5406 if (arglist == 0
5407 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5408 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5409 || TREE_CHAIN (arglist) == 0
5410 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5411 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5412 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5413 return const0_rtx;
5414 else if (!HAVE_cmpstrsi)
5415 break;
5416 {
5417 tree arg1 = TREE_VALUE (arglist);
5418 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5419 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5420 rtx result;
5421
5422 int arg1_align
5423 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5424 int arg2_align
5425 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5426 enum machine_mode insn_mode
5427 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5428
5429 /* If we don't have POINTER_TYPE, call the function. */
5430 if (arg1_align == 0 || arg2_align == 0)
5431 {
5432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5433 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5434 break;
5435 }
5436
5437 /* Make a place to write the result of the instruction. */
5438 result = target;
5439 if (! (result != 0
5440 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5441 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5442 result = gen_reg_rtx (insn_mode);
5443
5444 emit_insn (gen_cmpstrsi (result,
5445 gen_rtx (MEM, BLKmode,
5446 expand_expr (arg1, 0, Pmode, EXPAND_NORMAL)),
5447 gen_rtx (MEM, BLKmode,
5448 expand_expr (arg2, 0, Pmode, EXPAND_NORMAL)),
5449 expand_expr (len, 0, VOIDmode, 0),
5450 gen_rtx (CONST_INT, VOIDmode,
5451 MIN (arg1_align, arg2_align))));
5452
5453 /* Return the value in the proper mode for this function. */
5454 mode = TYPE_MODE (TREE_TYPE (exp));
5455 if (GET_MODE (result) == mode)
5456 return result;
5457 else if (target != 0)
5458 {
5459 convert_move (target, result, 0);
5460 return target;
5461 }
5462 else
5463 return convert_to_mode (mode, result, 0);
5464 }
5465#else
5466 case BUILT_IN_STRCMP:
5467 case BUILT_IN_MEMCMP:
5468 break;
5469#endif
5470
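/* Note added in editing: on a machine defining a cmpstrsi pattern,
   strcmp (s, "hi") gets the constant length 3 (terminator included)
   appended, and both strcmp and memcmp are then emitted as one
   length-bounded compare-string insn; without HAVE_cmpstrsi both
   builtins simply fall through to library calls.  */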
5471 default: /* just do library call, if unknown builtin */
5472 error ("built-in function %s not currently supported",
5473 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5474 }
5475
5476 /* The switch statement above can drop through to cause the function
5477 to be called normally. */
5478
5479 return expand_call (exp, target, ignore);
5480}
5481\f
5482/* Expand code for a post- or pre- increment or decrement
5483 and return the RTX for the result.
5484 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5485
5486static rtx
5487expand_increment (exp, post)
5488 register tree exp;
5489 int post;
5490{
5491 register rtx op0, op1;
5492 register rtx temp, value;
5493 register tree incremented = TREE_OPERAND (exp, 0);
5494 optab this_optab = add_optab;
5495 int icode;
5496 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5497 int op0_is_copy = 0;
5498
5499 /* Stabilize any component ref that might need to be
5500 evaluated more than once below. */
5501 if (TREE_CODE (incremented) == BIT_FIELD_REF
5502 || (TREE_CODE (incremented) == COMPONENT_REF
5503 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5504 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5505 incremented = stabilize_reference (incremented);
5506
5507 /* Compute the operands as RTX.
5508 Note whether OP0 is the actual lvalue or a copy of it:
5509 I believe it is a copy iff it is a register or subreg
5510 and insns were generated in computing it. */
5511 temp = get_last_insn ();
5512 op0 = expand_expr (incremented, 0, VOIDmode, 0);
5513 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5514 && temp != get_last_insn ());
5515 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5516
5517 /* Decide whether incrementing or decrementing. */
5518 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5519 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5520 this_optab = sub_optab;
5521
5522 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5523 then we cannot just increment OP0. We must
5524 therefore contrive to increment the original value.
5525 Then we can return OP0 since it is a copy of the old value. */
5526 if (op0_is_copy)
5527 {
5528 /* This is the easiest way to increment the value wherever it is.
5529 Problems with multiple evaluation of INCREMENTED
5530 are prevented because either (1) it is a component_ref,
5531 in which case it was stabilized above, or (2) it is an array_ref
5532 with constant index in an array in a register, which is
5533 safe to reevaluate. */
5534 tree newexp = build ((this_optab == add_optab
5535 ? PLUS_EXPR : MINUS_EXPR),
5536 TREE_TYPE (exp),
5537 incremented,
5538 TREE_OPERAND (exp, 1));
5539 temp = expand_assignment (incremented, newexp, ! post, 0);
5540 return post ? op0 : temp;
5541 }
5542
5543 /* Convert decrement by a constant into a negative increment. */
5544 if (this_optab == sub_optab
5545 && GET_CODE (op1) == CONST_INT)
5546 {
5547 op1 = gen_rtx (CONST_INT, VOIDmode, - INTVAL (op1));
5548 this_optab = add_optab;
5549 }
5550
5551 if (post)
5552 {
5553 /* We have a true reference to the value in OP0.
5554 If there is an insn to add or subtract in this mode, queue it. */
5555
5556#if 0 /* Turned off to avoid making extra insn for indexed memref. */
5557 op0 = stabilize (op0);
5558#endif
5559
5560 icode = (int) this_optab->handlers[(int) mode].insn_code;
5561 if (icode != (int) CODE_FOR_nothing
5562 /* Make sure that OP0 is valid for operands 0 and 1
5563 of the insn we want to queue. */
5564 && (*insn_operand_predicate[icode][0]) (op0, mode)
5565 && (*insn_operand_predicate[icode][1]) (op0, mode))
5566 {
5567 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5568 op1 = force_reg (mode, op1);
5569
5570 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5571 }
5572 }
5573
5574 /* Preincrement, or we can't increment with one simple insn. */
5575 if (post)
5576 /* Save a copy of the value before inc or dec, to return it later. */
5577 temp = value = copy_to_reg (op0);
5578 else
5579 /* Arrange to return the incremented value. */
5580 /* Copy the rtx because expand_binop will protect from the queue,
5581 and the results of that would be invalid for us to return
5582 if our caller does emit_queue before using our result. */
5583 temp = copy_rtx (value = op0);
5584
5585 /* Increment however we can. */
5586 op1 = expand_binop (mode, this_optab, value, op1, op0,
5587 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5588 /* Make sure the value is stored into OP0. */
5589 if (op1 != op0)
5590 emit_move_insn (op0, op1);
5591
5592 return temp;
5593}
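/* Illustration (a sketch added in editing): for a postincrement used
   for its value, as in

	y = x++;

   the code above either queues a single add insn and returns OP0, whose
   old value is what the enclosing expression must see, or, failing
   that, copies the old value to a register, increments in place, and
   returns the copy.  A preincrement instead returns the updated
   value.  */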
5594\f
5595/* Expand all function calls contained within EXP, innermost ones first.
5596 But don't look within expressions that have sequence points.
5597 For each CALL_EXPR, record the rtx for its value
5598 in the CALL_EXPR_RTL field. */
5599
5600static void
5601preexpand_calls (exp)
5602 tree exp;
5603{
5604 register int nops, i;
5605 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5606
5607 if (! do_preexpand_calls)
5608 return;
5609
5610 /* Only expressions and references can contain calls. */
5611
5612 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5613 return;
5614
5615 switch (TREE_CODE (exp))
5616 {
5617 case CALL_EXPR:
5618 /* Do nothing if already expanded. */
5619 if (CALL_EXPR_RTL (exp) != 0)
5620 return;
5621
5622 /* Do nothing to built-in functions. */
5623 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5624 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5625 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5626 CALL_EXPR_RTL (exp) = expand_call (exp, 0, 0);
5627 return;
5628
5629 case COMPOUND_EXPR:
5630 case COND_EXPR:
5631 case TRUTH_ANDIF_EXPR:
5632 case TRUTH_ORIF_EXPR:
5633 /* If we find one of these, then we can be sure
5634 the adjust will be done for it (since it makes jumps).
5635 Do it now, so that if this is inside an argument
5636 of a function, we don't get the stack adjustment
5637 after some other args have already been pushed. */
5638 do_pending_stack_adjust ();
5639 return;
5640
5641 case BLOCK:
5642 case RTL_EXPR:
5643 case WITH_CLEANUP_EXPR:
5644 return;
5645
5646 case SAVE_EXPR:
5647 if (SAVE_EXPR_RTL (exp) != 0)
5648 return;
5649 }
5650
5651 nops = tree_code_length[(int) TREE_CODE (exp)];
5652 for (i = 0; i < nops; i++)
5653 if (TREE_OPERAND (exp, i) != 0)
5654 {
5655 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5656 if (type == 'e' || type == '<' || type == '1' || type == '2'
5657 || type == 'r')
5658 preexpand_calls (TREE_OPERAND (exp, i));
5659 }
5660}
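/* Example added in editing: for an expression like x + f (y), this walk
   finds the CALL_EXPR for f and expands it, caching the result rtx in
   CALL_EXPR_RTL, before any code for the surrounding addition is
   emitted; expressions with sequence points (COMPOUND_EXPR and friends
   above) are deliberately not entered.  */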
5661\f
5662/* At the start of a function, record that we have no previously-pushed
5663 arguments waiting to be popped. */
5664
5665void
5666init_pending_stack_adjust ()
5667{
5668 pending_stack_adjust = 0;
5669}
5670
5671/* When exiting from function, if safe, clear out any pending stack adjust
5672 so the adjustment won't get done. */
5673
5674void
5675clear_pending_stack_adjust ()
5676{
5677#ifdef EXIT_IGNORE_STACK
5678 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5679 && ! (TREE_INLINE (current_function_decl) && ! flag_no_inline)
5680 && ! flag_inline_functions)
5681 pending_stack_adjust = 0;
5682#endif
5683}
5684
5685/* Pop any previously-pushed arguments that have not been popped yet. */
5686
5687void
5688do_pending_stack_adjust ()
5689{
5690 if (inhibit_defer_pop == 0)
5691 {
5692 if (pending_stack_adjust != 0)
5693 adjust_stack (gen_rtx (CONST_INT, VOIDmode, pending_stack_adjust));
5694 pending_stack_adjust = 0;
5695 }
5696}
5697
5698/* Expand all cleanups up to OLD_CLEANUPS.
5699 Needed here, and also for language-dependent calls. */
5700
5701void
5702expand_cleanups_to (old_cleanups)
5703 tree old_cleanups;
5704{
5705 while (cleanups_this_call != old_cleanups)
5706 {
5707 expand_expr (TREE_VALUE (cleanups_this_call), 0, VOIDmode, 0);
5708 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5709 }
5710}
5711\f
5712/* Expand conditional expressions. */
5713
5714/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5715 LABEL is an rtx of code CODE_LABEL, in this function and all the
5716 functions here. */
5717
5718void
5719jumpifnot (exp, label)
5720 tree exp;
5721 rtx label;
5722{
5723 do_jump (exp, label, 0);
5724}
5725
5726/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5727
5728void
5729jumpif (exp, label)
5730 tree exp;
5731 rtx label;
5732{
5733 do_jump (exp, 0, label);
5734}
5735
5736/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5737 the result is zero, or IF_TRUE_LABEL if the result is one.
5738 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5739 meaning fall through in that case.
5740
5741 do_jump always does any pending stack adjust except when it does not
5742 actually perform a jump. An example where there is no jump
5743 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5744
5745 This function is responsible for optimizing cases such as
5746 &&, || and comparison operators in EXP. */
5747
5748void
5749do_jump (exp, if_false_label, if_true_label)
5750 tree exp;
5751 rtx if_false_label, if_true_label;
5752{
5753 register enum tree_code code = TREE_CODE (exp);
5754 /* Some cases need to create a label to jump to
5755 in order to properly fall through.
5756 These cases set DROP_THROUGH_LABEL nonzero. */
5757 rtx drop_through_label = 0;
5758 rtx temp;
5759 rtx comparison = 0;
5760 int i;
5761 tree type;
5762
5763 emit_queue ();
5764
5765 switch (code)
5766 {
5767 case ERROR_MARK:
5768 break;
5769
5770 case INTEGER_CST:
5771 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5772 if (temp)
5773 emit_jump (temp);
5774 break;
5775
5776#if 0
5777 /* This is not true with #pragma weak */
5778 case ADDR_EXPR:
5779 /* The address of something can never be zero. */
5780 if (if_true_label)
5781 emit_jump (if_true_label);
5782 break;
5783#endif
5784
5785 case NOP_EXPR:
5786 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5787 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5788 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5789 goto normal;
5790 case CONVERT_EXPR:
5791 /* If we are narrowing the operand, we have to do the compare in the
5792 narrower mode. */
5793 if ((TYPE_PRECISION (TREE_TYPE (exp))
5794 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5795 goto normal;
5796 case NON_LVALUE_EXPR:
5797 case REFERENCE_EXPR:
5798 case ABS_EXPR:
5799 case NEGATE_EXPR:
5800 case LROTATE_EXPR:
5801 case RROTATE_EXPR:
5802 /* These cannot change zero->non-zero or vice versa. */
5803 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5804 break;
5805
5806#if 0
5807 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
5808 a test and can be longer if the test is eliminated. */
5809 case PLUS_EXPR:
5810 /* Reduce to minus. */
5811 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5812 TREE_OPERAND (exp, 0),
5813 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5814 TREE_OPERAND (exp, 1))));
5815 /* Process as MINUS. */
5816#endif
5817
5818 case MINUS_EXPR:
5819 /* Non-zero iff operands of minus differ. */
5820 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5821 TREE_OPERAND (exp, 0),
5822 TREE_OPERAND (exp, 1)),
5823 NE, NE);
5824 break;
5825
5826 case BIT_AND_EXPR:
5827 /* If we are AND'ing with a small constant, do this comparison in the
5828 smallest type that fits. If the machine doesn't have comparisons
5829 that small, it will be converted back to the wider comparison.
5830 This helps if we are testing the sign bit of a narrower object.
5831 combine can't do this for us because it can't know whether a
5832 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
5833
5834 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5835 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_INT
5836 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
5837 && (type = type_for_size (i + 1, 1)) != 0
5838 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5839 {
5840 do_jump (convert (type, exp), if_false_label, if_true_label);
5841 break;
5842 }
5843 goto normal;
5844
5845 case TRUTH_NOT_EXPR:
5846 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5847 break;
5848
5849 case TRUTH_ANDIF_EXPR:
5850 if (if_false_label == 0)
5851 if_false_label = drop_through_label = gen_label_rtx ();
5852 do_jump (TREE_OPERAND (exp, 0), if_false_label, 0);
5853 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5854 break;
5855
5856 case TRUTH_ORIF_EXPR:
5857 if (if_true_label == 0)
5858 if_true_label = drop_through_label = gen_label_rtx ();
5859 do_jump (TREE_OPERAND (exp, 0), 0, if_true_label);
5860 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5861 break;
5862
5863 case COMPOUND_EXPR:
5864 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5865 free_temp_slots ();
5866 emit_queue ();
5867 do_pending_stack_adjust ();
5868 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5869 break;
5870
5871 case COMPONENT_REF:
5872 case BIT_FIELD_REF:
5873 case ARRAY_REF:
5874 {
5875 int bitsize, bitpos, unsignedp;
5876 enum machine_mode mode;
5877 tree type;
5878 tree offset;
5879 int volatilep = 0;
5880
5881 /* Get description of this reference. We don't actually care
5882 about the underlying object here. */
5883 get_inner_reference (exp, &bitsize, &bitpos, &offset,
5884 &mode, &unsignedp, &volatilep);
5885
5886 type = type_for_size (bitsize, unsignedp);
5887 if (type != 0 && bitsize >= 0
5888 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5889 {
5890 do_jump (convert (type, exp), if_false_label, if_true_label);
5891 break;
5892 }
5893 goto normal;
5894 }
5895
5896 case COND_EXPR:
5897 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
5898 if (integer_onep (TREE_OPERAND (exp, 1))
5899 && integer_zerop (TREE_OPERAND (exp, 2)))
5900 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5901
5902 else if (integer_zerop (TREE_OPERAND (exp, 1))
5903 && integer_onep (TREE_OPERAND (exp, 2)))
5904 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5905
5906 else
5907 {
5908 register rtx label1 = gen_label_rtx ();
5909 drop_through_label = gen_label_rtx ();
5910 do_jump (TREE_OPERAND (exp, 0), label1, 0);
5911 /* Now the THEN-expression. */
5912 do_jump (TREE_OPERAND (exp, 1),
5913 if_false_label ? if_false_label : drop_through_label,
5914 if_true_label ? if_true_label : drop_through_label);
5915 /* In case the do_jump just above never jumps. */
5916 do_pending_stack_adjust ();
5917 emit_label (label1);
5918 /* Now the ELSE-expression. */
5919 do_jump (TREE_OPERAND (exp, 2),
5920 if_false_label ? if_false_label : drop_through_label,
5921 if_true_label ? if_true_label : drop_through_label);
5922 }
5923 break;
5924
5925 case EQ_EXPR:
5926 if (integer_zerop (TREE_OPERAND (exp, 1)))
5927 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5928 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5929 == MODE_INT)
5930 &&
5931 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5932 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
5933 else
5934 comparison = compare (exp, EQ, EQ);
5935 break;
5936
5937 case NE_EXPR:
5938 if (integer_zerop (TREE_OPERAND (exp, 1)))
5939 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5940 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5941 == MODE_INT)
5942 &&
5943 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5944 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
5945 else
5946 comparison = compare (exp, NE, NE);
5947 break;
5948
5949 case LT_EXPR:
5950 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5951 == MODE_INT)
5952 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5953 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
5954 else
5955 comparison = compare (exp, LT, LTU);
5956 break;
5957
5958 case LE_EXPR:
5959 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5960 == MODE_INT)
5961 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5962 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
5963 else
5964 comparison = compare (exp, LE, LEU);
5965 break;
5966
5967 case GT_EXPR:
5968 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5969 == MODE_INT)
5970 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5971 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
5972 else
5973 comparison = compare (exp, GT, GTU);
5974 break;
5975
5976 case GE_EXPR:
5977 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5978 == MODE_INT)
5979 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5980 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
5981 else
5982 comparison = compare (exp, GE, GEU);
5983 break;
5984
5985 default:
5986 normal:
5987 temp = expand_expr (exp, 0, VOIDmode, 0);
5988#if 0
5989 /* This is not needed any more and causes poor code since it causes
5990 comparisons and tests from non-SI objects to have different code
5991 sequences. */
5992 /* Copy to register to avoid generating bad insns by cse
5993 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
5994 if (!cse_not_expected && GET_CODE (temp) == MEM)
5995 temp = copy_to_reg (temp);
5996#endif
5997 do_pending_stack_adjust ();
5998 if (GET_CODE (temp) == CONST_INT)
5999 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6000 else if (GET_CODE (temp) == LABEL_REF)
6001 comparison = const_true_rtx;
6002 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6003 && !can_compare_p (GET_MODE (temp)))
6004 /* Note swapping the labels gives us not-equal. */
6005 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6006 else if (GET_MODE (temp) != VOIDmode)
6007 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6008 NE, 1, GET_MODE (temp), 0, 0);
6009 else
6010 abort ();
6011 }
6012
6013 /* Do any postincrements in the expression that was tested. */
6014 emit_queue ();
6015
6016 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6017 straight into a conditional jump instruction as the jump condition.
6018 Otherwise, all the work has been done already. */
6019
6020 if (comparison == const_true_rtx)
6021 {
6022 if (if_true_label)
6023 emit_jump (if_true_label);
6024 }
6025 else if (comparison == const0_rtx)
6026 {
6027 if (if_false_label)
6028 emit_jump (if_false_label);
6029 }
6030 else if (comparison)
6031 do_jump_for_compare (comparison, if_false_label, if_true_label);
6032
6033 free_temp_slots ();
6034
6035 if (drop_through_label)
6036 {
6037 /* If do_jump produces code that might be jumped around,
6038 do any stack adjusts from that code, before the place
6039 where control merges in. */
6040 do_pending_stack_adjust ();
6041 emit_label (drop_through_label);
6042 }
6043}
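/* Illustration (a sketch added in editing): for a condition such as

	if (a && b) stmt;

   jumpifnot never materializes a boolean value; the TRUTH_ANDIF_EXPR
   case above emits one conditional jump past STMT if A is false and a
   second if B is false, which is exactly short-circuit evaluation.  */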
6044\f
6045/* Given a comparison expression EXP for values too wide to be compared
6046 with one insn, test the comparison and jump to the appropriate label.
6047 The code of EXP is ignored; we always test GT if SWAP is 0,
6048 and LT if SWAP is 1. */
6049
6050static void
6051do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6052 tree exp;
6053 int swap;
6054 rtx if_false_label, if_true_label;
6055{
6056 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), 0, VOIDmode, 0);
6057 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), 0, VOIDmode, 0);
6058 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6059 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6060 rtx drop_through_label = 0;
6061 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6062 int i;
6063
6064 if (! if_true_label || ! if_false_label)
6065 drop_through_label = gen_label_rtx ();
6066 if (! if_true_label)
6067 if_true_label = drop_through_label;
6068 if (! if_false_label)
6069 if_false_label = drop_through_label;
6070
6071 /* Compare a word at a time, high order first. */
6072 for (i = 0; i < nwords; i++)
6073 {
6074 rtx comp;
6075 rtx op0_word, op1_word;
6076
6077 if (WORDS_BIG_ENDIAN)
6078 {
6079 op0_word = operand_subword_force (op0, i, mode);
6080 op1_word = operand_subword_force (op1, i, mode);
6081 }
6082 else
6083 {
6084 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6085 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6086 }
6087
6088 /* All but high-order word must be compared as unsigned. */
6089 comp = compare_from_rtx (op0_word, op1_word,
6090 (unsignedp || i > 0) ? GTU : GT,
6091 unsignedp, word_mode, 0, 0);
6092 if (comp == const_true_rtx)
6093 emit_jump (if_true_label);
6094 else if (comp != const0_rtx)
6095 do_jump_for_compare (comp, 0, if_true_label);
6096
6097 /* Consider lower words only if these are equal. */
6098 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6099 0, 0);
6100 if (comp == const_true_rtx)
6101 emit_jump (if_false_label);
6102 else if (comp != const0_rtx)
6103 do_jump_for_compare (comp, 0, if_false_label);
6104 }
6105
6106 if (if_false_label)
6107 emit_jump (if_false_label);
6108 if (drop_through_label)
6109 emit_label (drop_through_label);
6110}
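/* Sketch added in editing (hypothetical helper, not used anywhere): the
   word-at-a-time algorithm above written out in C for a two-word
   unsigned comparison.  High-order words are examined first; low-order
   words matter only when the high-order words are equal.  */

static int
example_two_word_gtu (a_hi, a_lo, b_hi, b_lo)
     unsigned long a_hi, a_lo, b_hi, b_lo;
{
  if (a_hi > b_hi)		/* like the GTU jump to IF_TRUE_LABEL */
    return 1;
  if (a_hi != b_hi)		/* like the NE jump to IF_FALSE_LABEL */
    return 0;
  return a_lo > b_lo;		/* low words are always compared unsigned */
}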
6111
6112/* Given an EQ_EXPR expression EXP for values too wide to be compared
6113 with one insn, test the comparison and jump to the appropriate label. */
6114
6115static void
6116do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6117 tree exp;
6118 rtx if_false_label, if_true_label;
6119{
6120 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6121 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6122 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6123 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6124 int i;
6125 rtx drop_through_label = 0;
6126
6127 if (! if_false_label)
6128 drop_through_label = if_false_label = gen_label_rtx ();
6129
6130 for (i = 0; i < nwords; i++)
6131 {
6132 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6133 operand_subword_force (op1, i, mode),
6134 EQ, 0, word_mode, 0, 0);
6135 if (comp == const0_rtx)
6136 emit_jump (if_false_label);
6137 else if (comp != const_true_rtx)
6138 do_jump_for_compare (comp, if_false_label, 0);
6139 }
6140
6141 if (if_true_label)
6142 emit_jump (if_true_label);
6143 if (drop_through_label)
6144 emit_label (drop_through_label);
6145}
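/* Sketch added in editing (hypothetical helper, not used anywhere): the
   equality loop above in plain C for two words; any unequal word sends
   control to the false label, and only falling out of the loop means
   the values are equal.  */

static int
example_two_word_eq (a_hi, a_lo, b_hi, b_lo)
     unsigned long a_hi, a_lo, b_hi, b_lo;
{
  if (a_hi != b_hi)		/* jump to IF_FALSE_LABEL */
    return 0;
  if (a_lo != b_lo)
    return 0;
  return 1;			/* fall through, or jump to IF_TRUE_LABEL */
}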
6146\f
6147/* Jump according to whether OP0 is 0.
6148 We assume that OP0 has an integer mode that is too wide
6149 for the available compare insns. */
6150
6151static void
6152do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6153 rtx op0;
6154 rtx if_false_label, if_true_label;
6155{
6156 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6157 int i;
6158 rtx drop_through_label = 0;
6159
6160 if (! if_false_label)
6161 drop_through_label = if_false_label = gen_label_rtx ();
6162
6163 for (i = 0; i < nwords; i++)
6164 {
6165 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6166 GET_MODE (op0)),
6167 const0_rtx, EQ, 0, word_mode, 0, 0);
6168 if (comp == const0_rtx)
6169 emit_jump (if_false_label);
6170 else if (comp != const_true_rtx)
6171 do_jump_for_compare (comp, if_false_label, 0);
6172 }
6173
6174 if (if_true_label)
6175 emit_jump (if_true_label);
6176 if (drop_through_label)
6177 emit_label (drop_through_label);
6178}
6179
6180/* Given a comparison expression in rtl form, output conditional branches to
6181 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6182
6183static void
6184do_jump_for_compare (comparison, if_false_label, if_true_label)
6185 rtx comparison, if_false_label, if_true_label;
6186{
6187 if (if_true_label)
6188 {
6189 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6190 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6191 else
6192 abort ();
6193
6194 if (if_false_label)
6195 emit_jump (if_false_label);
6196 }
6197 else if (if_false_label)
6198 {
6199 rtx insn;
6200 rtx prev = PREV_INSN (get_last_insn ());
6201 rtx branch = 0;
6202
6203 /* Output the branch with the opposite condition. Then try to invert
6204 what is generated. If more than one insn is a branch, or if the
6205 branch is not the last insn written, abort. If we can't invert
6206 the branch, make a true label, redirect this jump to that,
6207 emit a jump to the false label and define the true label. */
6208
6209 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6210 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6211 else
6212 abort ();
6213
6214 /* Here we get the insn before what was just emitted.
6215 On some machines, emitting the branch can discard
6216 the previous compare insn and emit a replacement. */
6217 if (prev == 0)
6218 /* If there's only one preceding insn... */
6219 insn = get_insns ();
6220 else
6221 insn = NEXT_INSN (prev);
6222
6223 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6224 if (GET_CODE (insn) == JUMP_INSN)
6225 {
6226 if (branch)
6227 abort ();
6228 branch = insn;
6229 }
6230
6231 if (branch != get_last_insn ())
6232 abort ();
6233
6234 if (! invert_jump (branch, if_false_label))
6235 {
6236 if_true_label = gen_label_rtx ();
6237 redirect_jump (branch, if_true_label);
6238 emit_jump (if_false_label);
6239 emit_label (if_true_label);
6240 }
6241 }
6242}
6243\f
6244/* Generate code for a comparison expression EXP
6245 (including code to compute the values to be compared)
6246 and set (CC0) according to the result.
6247 SIGNED_CODE should be the rtx operation for this comparison for
6248 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6249
6250 We force a stack adjustment unless there are currently
6251 things pushed on the stack that aren't yet used. */
6252
6253static rtx
6254compare (exp, signed_code, unsigned_code)
6255 register tree exp;
6256 enum rtx_code signed_code, unsigned_code;
6257{
6258 register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6259 register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6260 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6261 register enum machine_mode mode = TYPE_MODE (type);
6262 int unsignedp = TREE_UNSIGNED (type);
6263 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6264
6265 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6266 ((mode == BLKmode)
6267 ? expr_size (TREE_OPERAND (exp, 0)) : 0),
6268 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6269}
6270
6271/* Like compare but expects the values to compare as two rtx's.
6272 The decision as to signed or unsigned comparison must be made by the caller.
6273
6274 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6275 compared.
6276
6277 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6278 size of MODE should be used. */
6279
6280rtx
6281compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6282 register rtx op0, op1;
6283 enum rtx_code code;
6284 int unsignedp;
6285 enum machine_mode mode;
6286 rtx size;
6287 int align;
6288{
6289 /* If one operand is constant, make it the second one. */
6290
6291 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6292 {
6293 rtx tem = op0;
6294 op0 = op1;
6295 op1 = tem;
6296 code = swap_condition (code);
6297 }
6298
6299 if (flag_force_mem)
6300 {
6301 op0 = force_not_mem (op0);
6302 op1 = force_not_mem (op1);
6303 }
6304
6305 do_pending_stack_adjust ();
6306
6307 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6308 return simplify_relational_operation (code, mode, op0, op1);
6309
6310 /* If this is a signed equality comparison, we can do it as an
6311 unsigned comparison since zero-extension is cheaper than sign
6312 extension and comparisons with zero are done as unsigned. If we
6313 are comparing against a constant, we must convert it to what it
6314 would look like unsigned. */
6315 if ((code == EQ || code == NE) && ! unsignedp
6316 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT)
6317 {
6318 if (GET_CODE (op1) == CONST_INT
6319 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6320 op1 = gen_rtx (CONST_INT, VOIDmode,
6321 INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6322 unsignedp = 1;
6323 }
6324
6325 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6326
6327 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6328}
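/* Worked example added in editing for the conversion above: comparing a
   QImode value against the signed constant -1 masks the constant with
   GET_MODE_MASK, so

	(signed char) x == -1	is tested as	(unsigned char) x == 0xff

   which is safe for EQ and NE because equality does not depend on
   whether the operands were sign- or zero-extended.  */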
6329\f
6330/* Generate code to calculate EXP using a store-flag instruction
6331 and return an rtx for the result. EXP is either a comparison
6332 or a TRUTH_NOT_EXPR whose operand is a comparison.
6333
6334 If TARGET is nonzero, store the result there if convenient.
6335
6336 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6337 cheap.
6338
6339 Return zero if there is no suitable set-flag instruction
6340 available on this machine.
6341
6342 Once expand_expr has been called on the arguments of the comparison,
6343 we are committed to doing the store flag, since it is not safe to
6344 re-evaluate the expression. We emit the store-flag insn by calling
6345 emit_store_flag, but only expand the arguments if we have a reason
6346 to believe that emit_store_flag will be successful. If we think that
6347 it will, but it isn't, we have to simulate the store-flag with a
6348 set/jump/set sequence. */
6349
6350static rtx
6351do_store_flag (exp, target, mode, only_cheap)
6352 tree exp;
6353 rtx target;
6354 enum machine_mode mode;
6355 int only_cheap;
6356{
6357 enum rtx_code code;
6358 tree arg0, arg1, type;
6359 tree tem;
6360 enum machine_mode operand_mode;
6361 int invert = 0;
6362 int unsignedp;
6363 rtx op0, op1;
6364 enum insn_code icode;
6365 rtx subtarget = target;
6366 rtx result, label, pattern, jump_pat;
6367
6368 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6369 result at the end. We can't simply invert the test since it would
6370 have already been inverted if it were valid. This case occurs for
6371 some floating-point comparisons. */
6372
6373 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6374 invert = 1, exp = TREE_OPERAND (exp, 0);
6375
6376 arg0 = TREE_OPERAND (exp, 0);
6377 arg1 = TREE_OPERAND (exp, 1);
6378 type = TREE_TYPE (arg0);
6379 operand_mode = TYPE_MODE (type);
6380 unsignedp = TREE_UNSIGNED (type);
6381
6382 /* We won't bother with BLKmode store-flag operations because it would mean
6383 passing a lot of information to emit_store_flag. */
6384 if (operand_mode == BLKmode)
6385 return 0;
6386
6387 while (TREE_CODE (arg0) == NON_LVALUE_EXPR)
6388 arg0 = TREE_OPERAND (arg0, 0);
6389
6390 while (TREE_CODE (arg1) == NON_LVALUE_EXPR)
6391 arg1 = TREE_OPERAND (arg1, 0);
6392
6393 /* Get the rtx comparison code to use. We know that EXP is a comparison
6394 operation of some type. Some comparisons against 1 and -1 can be
6395 converted to comparisons with zero. Do so here so that the tests
6396 below will be aware that we have a comparison with zero. These
6397 tests will not catch constants in the first operand, but constants
6398 are rarely passed as the first operand. */
6399
6400 switch (TREE_CODE (exp))
6401 {
6402 case EQ_EXPR:
6403 code = EQ;
6404 break;
6405 case NE_EXPR:
6406 code = NE;
6407 break;
6408 case LT_EXPR:
6409 if (integer_onep (arg1))
6410 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6411 else
6412 code = unsignedp ? LTU : LT;
6413 break;
6414 case LE_EXPR:
6415 if (integer_all_onesp (arg1))
6416 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6417 else
6418 code = unsignedp ? LEU : LE;
6419 break;
6420 case GT_EXPR:
6421 if (integer_all_onesp (arg1))
6422 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6423 else
6424 code = unsignedp ? GTU : GT;
6425 break;
6426 case GE_EXPR:
6427 if (integer_onep (arg1))
6428 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6429 else
6430 code = unsignedp ? GEU : GE;
6431 break;
6432 default:
6433 abort ();
6434 }
6435
6436 /* Put a constant second. */
6437 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6438 {
6439 tem = arg0; arg0 = arg1; arg1 = tem;
6440 code = swap_condition (code);
6441 }
6442
6443 /* If this is an equality or inequality test of a single bit, we can
6444 do this by shifting the bit being tested to the low-order bit and
6445 masking the result with the constant 1. If the condition was EQ,
6446 we xor it with 1. This does not require an scc insn and is faster
6447 than an scc insn even if we have it. */
6448
6449 if ((code == NE || code == EQ)
6450 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6451 && integer_pow2p (TREE_OPERAND (arg0, 1))
6452 && TYPE_PRECISION (type) <= HOST_BITS_PER_INT)
6453 {
6454 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6455 0, VOIDmode, 0)));
6456
6457 if (subtarget == 0 || GET_CODE (subtarget) != REG
6458 || GET_MODE (subtarget) != operand_mode
6459 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6460 subtarget = 0;
6461
6462 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6463
6464 if (bitnum != 0)
6465 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6466 size_int (bitnum), target, 1);
6467
6468 if (GET_MODE (op0) != mode)
6469 op0 = convert_to_mode (mode, op0, 1);
6470
6471 if (bitnum != TYPE_PRECISION (type) - 1)
6472 op0 = expand_and (op0, const1_rtx, target);
6473
6474 if ((code == EQ && ! invert) || (code == NE && invert))
6475 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6476 OPTAB_LIB_WIDEN);
6477
6478 return op0;
6479 }
6480
6481 /* Now see if we are likely to be able to do this. Return if not. */
6482 if (! can_compare_p (operand_mode))
6483 return 0;
6484 icode = setcc_gen_code[(int) code];
6485 if (icode == CODE_FOR_nothing
6486 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6487 {
6488 /* We can only do this if it is one of the special cases that
6489 can be handled without an scc insn. */
6490 if ((code == LT && integer_zerop (arg1))
6491 || (! only_cheap && code == GE && integer_zerop (arg1)))
6492 ;
6493 else if (BRANCH_COST >= 0
6494 && ! only_cheap && (code == NE || code == EQ)
6495 && TREE_CODE (type) != REAL_TYPE
6496 && ((abs_optab->handlers[(int) operand_mode].insn_code
6497 != CODE_FOR_nothing)
6498 || (ffs_optab->handlers[(int) operand_mode].insn_code
6499 != CODE_FOR_nothing)))
6500 ;
6501 else
6502 return 0;
6503 }
6504
6505 preexpand_calls (exp);
6506 if (subtarget == 0 || GET_CODE (subtarget) != REG
6507 || GET_MODE (subtarget) != operand_mode
6508 || ! safe_from_p (subtarget, arg1))
6509 subtarget = 0;
6510
6511 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6512 op1 = expand_expr (arg1, 0, VOIDmode, 0);
6513
6514 if (target == 0)
6515 target = gen_reg_rtx (mode);
6516
6517 result = emit_store_flag (target, code, op0, op1, operand_mode,
6518 unsignedp, 1);
6519
6520 if (result)
6521 {
6522 if (invert)
6523 result = expand_binop (mode, xor_optab, result, const1_rtx,
6524 result, 0, OPTAB_LIB_WIDEN);
6525 return result;
6526 }
6527
6528 /* If this failed, we have to do this with set/compare/jump/set code. */
6529 if (target == 0 || GET_CODE (target) != REG
6530 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6531 target = gen_reg_rtx (GET_MODE (target));
6532
6533 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6534 result = compare_from_rtx (op0, op1, code, unsignedp, operand_mode, 0, 0);
6535 if (GET_CODE (result) == CONST_INT)
6536 return (((result == const0_rtx && ! invert)
6537 || (result != const0_rtx && invert))
6538 ? const0_rtx : const1_rtx);
6539
6540 label = gen_label_rtx ();
6541 if (bcc_gen_fctn[(int) code] == 0)
6542 abort ();
6543
6544 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6545 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6546 emit_label (label);
6547
6548 return target;
6549}
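/* Sketch added in editing (hypothetical helpers, not used anywhere):
   the single-bit shortcut above in plain C, for a test of bit 3.  The
   NE flavor is a shift and mask; the EQ flavor merely xors the result
   with 1, so neither needs an scc insn.  */

static int
example_bit_test_ne (x)
     unsigned int x;
{
  return (x >> 3) & 1;		/* (x & 8) != 0 */
}

static int
example_bit_test_eq (x)
     unsigned int x;
{
  return ((x >> 3) & 1) ^ 1;	/* (x & 8) == 0 */
}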
6550\f
6551/* Generate a tablejump instruction (used for switch statements). */
6552
6553#ifdef HAVE_tablejump
6554
6555/* INDEX is the value being switched on, with the lowest value
6556 in the table already subtracted.
6557 MODE is its expected mode (needed if INDEX is constant).
6558 RANGE is the length of the jump table.
6559 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6560
6561 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6562 index value is out of range. */
6563
6564void
e87b4f3f 6565do_tablejump (index, mode, range, table_label, default_label)
bbf6f052 6566 rtx index, range, table_label, default_label;
e87b4f3f 6567 enum machine_mode mode;
bbf6f052
RK
6568{
6569 register rtx temp, vector;
6570
6571 /* Do an unsigned comparison (in the proper mode) between the index
6572 expression and the value which represents the length of the range.
6573 Since we just finished subtracting the lower bound of the range
6574 from the index expression, this comparison allows us to simultaneously
6575 check that the original index expression value is both greater than
6576 or equal to the minimum value of the range and less than or equal to
6577 the maximum value of the range. */
6578
6579 emit_cmp_insn (range, index, LTU, 0, mode, 0, 0);
6580 emit_jump_insn (gen_bltu (default_label));
6581
6582 /* If index is in range, it must fit in Pmode.
6583 Convert to Pmode so we can index with it. */
6584 if (mode != Pmode)
6585 index = convert_to_mode (Pmode, index, 1);
6586
6587 /* If flag_force_addr were to affect this address
6588 it could interfere with the tricky assumptions made
6589 about addresses that contain label-refs,
6590 which may be valid only very near the tablejump itself. */
6591 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6592 GET_MODE_SIZE, because this indicates how large insns are. The other
6593 uses should all be Pmode, because they are addresses. This code
6594 could fail if addresses and insns are not the same size. */
6595 index = memory_address_noforce
6596 (CASE_VECTOR_MODE,
6597 gen_rtx (PLUS, Pmode,
6598 gen_rtx (MULT, Pmode, index,
6599 gen_rtx (CONST_INT, VOIDmode,
6600 GET_MODE_SIZE (CASE_VECTOR_MODE))),
6601 gen_rtx (LABEL_REF, Pmode, table_label)));
6602 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6603 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6604 RTX_UNCHANGING_P (vector) = 1;
6605 convert_move (temp, vector, 0);
6606
6607 emit_jump_insn (gen_tablejump (temp, table_label));
6608
6609#ifndef CASE_VECTOR_PC_RELATIVE
6610 /* If we are generating PIC code or if the table is PC-relative, the
6611 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6612 if (! flag_pic)
6613 emit_barrier ();
6614#endif
6615}
6616
6617#endif /* HAVE_tablejump */
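/* Note added in editing: the address arithmetic in do_tablejump
   computes, in effect,

	entry = table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   so with a 4-byte case vector an index of 2 loads the label stored 8
   bytes past the start of the table, and gen_tablejump branches through
   that loaded label.  */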