1/* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
22#include "rtl.h"
23#include "tree.h"
24#include "flags.h"
25#include "function.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "expr.h"
29#include "insn-config.h"
30#include "recog.h"
31#include "output.h"
32#include "gvarargs.h"
33#include "typeclass.h"
34
35#define CEIL(x,y) (((x) + (y) - 1) / (y))
36
37/* Decide whether a function's arguments should be processed
38 from first to last or from last to first. */
39
40#ifdef STACK_GROWS_DOWNWARD
41#ifdef PUSH_ROUNDING
42#define PUSH_ARGS_REVERSED /* If it's last to first */
43#endif
44#endif
45
46#ifndef STACK_PUSH_CODE
47#ifdef STACK_GROWS_DOWNWARD
48#define STACK_PUSH_CODE PRE_DEC
49#else
50#define STACK_PUSH_CODE PRE_INC
51#endif
52#endif
53
54/* Like STACK_BOUNDARY but in units of bytes, not bits. */
55#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
56
57/* If this is nonzero, we do not bother generating VOLATILE
58 around volatile memory references, and we are willing to
59 output indirect addresses. If cse is to follow, we reject
60 indirect addresses so a useful potential cse is generated;
61 if it is used only once, instruction combination will produce
62 the same indirect address eventually. */
63int cse_not_expected;
64
65/* Nonzero to generate code for all the subroutines within an
66 expression before generating the upper levels of the expression.
67 Nowadays this is never zero. */
68int do_preexpand_calls = 1;
69
70/* Number of units that we should eventually pop off the stack.
71 These are the arguments to function calls that have already returned. */
72int pending_stack_adjust;
73
74/* Nonzero means stack pops must not be deferred, and deferred stack
75 pops must not be output. It is nonzero inside a function call,
76 inside a conditional expression, inside a statement expression,
77 and in other cases as well. */
78int inhibit_defer_pop;
79
80/* A list of all cleanups which belong to the arguments of
81 function calls being expanded by expand_call. */
82tree cleanups_this_call;
83
84/* Nonzero means __builtin_saveregs has already been done in this function.
85 The value is the pseudoreg containing the value __builtin_saveregs
86 returned. */
87static rtx saveregs_value;
88
89rtx store_expr ();
90static void store_constructor ();
91static rtx store_field ();
92static rtx expand_builtin ();
93static rtx compare ();
94static rtx do_store_flag ();
95static void preexpand_calls ();
96static rtx expand_increment ();
97static void init_queue ();
98
99void do_pending_stack_adjust ();
100static void do_jump_for_compare ();
101static void do_jump_by_parts_equality ();
102static void do_jump_by_parts_equality_rtx ();
103static void do_jump_by_parts_greater ();
104
105/* Record for each mode whether we can move a register directly to or
106 from an object of that mode in memory. If we can't, we won't try
107 to use that mode directly when accessing a field of that mode. */
108
109static char direct_load[NUM_MACHINE_MODES];
110static char direct_store[NUM_MACHINE_MODES];
111
112/* MOVE_RATIO is the number of move instructions that is better than
113 a block move. */
114
115#ifndef MOVE_RATIO
116#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
117#define MOVE_RATIO 2
118#else
119/* A value of around 6 would minimize code size; infinity would minimize
120 execution time. */
121#define MOVE_RATIO 15
122#endif
123#endif
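/* Editorial sketch, not in the original source: MOVE_RATIO is compared
   against the count returned by move_by_pieces_ninsns (defined below).
   Assuming a 32-bit target with no movstr patterns (so MOVE_RATIO is 15),
   copying a 12-byte block takes three SImode moves, and since

     move_by_pieces_ninsns (12, 4) == 3, and 3 < 15,

   emit_block_move expands the copy inline instead of emitting the
   memcpy/bcopy library call.  */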
124
125/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
126
127#ifndef SLOW_UNALIGNED_ACCESS
128#define SLOW_UNALIGNED_ACCESS 0
129#endif
130\f
131/* This is run once per compilation to set up which modes can be used
132 directly in memory. */
133
134void
135init_expr_once ()
136{
137 rtx insn, pat;
138 enum machine_mode mode;
139 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
140
141 start_sequence ();
142 insn = emit_insn (gen_rtx (SET, 0, 0));
143 pat = PATTERN (insn);
144
145 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
146 mode = (enum machine_mode) ((int) mode + 1))
147 {
148 int regno;
149 rtx reg;
150 int num_clobbers;
151
152 direct_load[(int) mode] = direct_store[(int) mode] = 0;
153 PUT_MODE (mem, mode);
154
155 /* Find a register that can be used in this mode, if any. */
156 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
157 if (HARD_REGNO_MODE_OK (regno, mode))
158 break;
159
160 if (regno == FIRST_PSEUDO_REGISTER)
161 continue;
162
163 reg = gen_rtx (REG, mode, regno);
164
165 SET_SRC (pat) = mem;
166 SET_DEST (pat) = reg;
167 direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
168
169 SET_SRC (pat) = reg;
170 SET_DEST (pat) = mem;
171 direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;
172 }
173
174 end_sequence ();
175}
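/* Editorial note, not in the original source: the probe above asks recog
   whether (set (reg:M r) (mem:M (sp))) and its converse match some insn
   pattern.  Later code consults the resulting tables like this:

     if (direct_load[(int) mode])
       ...a MODE value can be loaded from memory straight into a reg...  */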
176
177/* This is run at the start of compiling a function. */
178
179void
180init_expr ()
181{
182 init_queue ();
183
184 pending_stack_adjust = 0;
185 inhibit_defer_pop = 0;
186 cleanups_this_call = 0;
187 saveregs_value = 0;
188 forced_labels = 0;
189}
190
191/* Save all variables describing the current status into the structure *P.
192 This is used before starting a nested function. */
193
194void
195save_expr_status (p)
196 struct function *p;
197{
198 /* Instead of saving the postincrement queue, empty it. */
199 emit_queue ();
200
201 p->pending_stack_adjust = pending_stack_adjust;
202 p->inhibit_defer_pop = inhibit_defer_pop;
203 p->cleanups_this_call = cleanups_this_call;
204 p->saveregs_value = saveregs_value;
205 p->forced_labels = forced_labels;
206
207 pending_stack_adjust = 0;
208 inhibit_defer_pop = 0;
209 cleanups_this_call = 0;
210 saveregs_value = 0;
211 forced_labels = 0;
212}
213
214/* Restore all variables describing the current status from the structure *P.
215 This is used after a nested function. */
216
217void
218restore_expr_status (p)
219 struct function *p;
220{
221 pending_stack_adjust = p->pending_stack_adjust;
222 inhibit_defer_pop = p->inhibit_defer_pop;
223 cleanups_this_call = p->cleanups_this_call;
224 saveregs_value = p->saveregs_value;
225 forced_labels = p->forced_labels;
226}
227\f
228/* Manage the queue of increment instructions to be output
229 for POSTINCREMENT_EXPR expressions, etc. */
230
231static rtx pending_chain;
232
233/* Queue up to increment (or change) VAR later. BODY says how:
234 BODY should be the same thing you would pass to emit_insn
235 to increment right away. It will go to emit_insn later on.
236
237 The value is a QUEUED expression to be used in place of VAR
238 where you want to guarantee the pre-incrementation value of VAR. */
239
240static rtx
241enqueue_insn (var, body)
242 rtx var, body;
243{
244 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
245 var, 0, 0, body, pending_chain);
246 return pending_chain;
247}
248
249/* Use protect_from_queue to convert a QUEUED expression
250 into something that you can put immediately into an instruction.
251 If the queued incrementation has not happened yet,
252 protect_from_queue returns the variable itself.
253 If the incrementation has happened, protect_from_queue returns a temp
254 that contains a copy of the old value of the variable.
255
256 Any time an rtx which might possibly be a QUEUED is to be put
257 into an instruction, it must be passed through protect_from_queue first.
258 QUEUED expressions are not meaningful in instructions.
259
260 Do not pass a value through protect_from_queue and then hold
261 on to it for a while before putting it in an instruction!
262 If the queue is flushed in between, incorrect code will result. */
263
264rtx
265protect_from_queue (x, modify)
266 register rtx x;
267 int modify;
268{
269 register RTX_CODE code = GET_CODE (x);
270
271#if 0 /* A QUEUED can hang around after the queue is forced out. */
272 /* Shortcut for most common case. */
273 if (pending_chain == 0)
274 return x;
275#endif
276
277 if (code != QUEUED)
278 {
279 /* A special hack for read access to (MEM (QUEUED ...))
280 to facilitate use of autoincrement.
281 Make a copy of the contents of the memory location
282 rather than a copy of the address, but not
283 if the value is of mode BLKmode. */
284 if (code == MEM && GET_MODE (x) != BLKmode
285 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
286 {
287 register rtx y = XEXP (x, 0);
288 XEXP (x, 0) = QUEUED_VAR (y);
289 if (QUEUED_INSN (y))
290 {
291 register rtx temp = gen_reg_rtx (GET_MODE (x));
292 emit_insn_before (gen_move_insn (temp, x),
293 QUEUED_INSN (y));
294 return temp;
295 }
296 return x;
297 }
298 /* Otherwise, recursively protect the subexpressions of all
299 the kinds of rtx's that can contain a QUEUED. */
300 if (code == MEM)
301 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
302 else if (code == PLUS || code == MULT)
303 {
304 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
305 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
306 }
307 return x;
308 }
309 /* If the increment has not happened, use the variable itself. */
310 if (QUEUED_INSN (x) == 0)
311 return QUEUED_VAR (x);
312 /* If the increment has happened and a pre-increment copy exists,
313 use that copy. */
314 if (QUEUED_COPY (x) != 0)
315 return QUEUED_COPY (x);
316 /* The increment has happened but we haven't set up a pre-increment copy.
317 Set one up now, and use it. */
318 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
319 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
320 QUEUED_INSN (x));
321 return QUEUED_COPY (x);
322}
323
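/* Editorial usage sketch, not in the original source, assuming a
   hypothetical register VAR being post-incremented:

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ...each use that needs VAR's old value passes Q through
        protect_from_queue (q, 0) just before emitting its insn...
     emit_queue ();

   The QUEUED rtx defers the increment until emit_queue runs.  */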
324/* Return nonzero if X contains a QUEUED expression:
325 if it contains anything that will be altered by a queued increment.
326 We handle only combinations of MEM, PLUS, MINUS and MULT operators
327 since memory addresses generally contain only those. */
328
329static int
330queued_subexp_p (x)
331 rtx x;
332{
333 register enum rtx_code code = GET_CODE (x);
334 switch (code)
335 {
336 case QUEUED:
337 return 1;
338 case MEM:
339 return queued_subexp_p (XEXP (x, 0));
340 case MULT:
341 case PLUS:
342 case MINUS:
343 return queued_subexp_p (XEXP (x, 0))
344 || queued_subexp_p (XEXP (x, 1));
345 }
346 return 0;
347}
348
349/* Perform all the pending incrementations. */
350
351void
352emit_queue ()
353{
354 register rtx p;
355 while (p = pending_chain)
356 {
357 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
358 pending_chain = QUEUED_NEXT (p);
359 }
360}
361
362static void
363init_queue ()
364{
365 if (pending_chain)
366 abort ();
367}
368\f
369/* Copy data from FROM to TO, where the machine modes are not the same.
370 Both modes may be integer, or both may be floating.
371 UNSIGNEDP should be nonzero if FROM is an unsigned type.
372 This causes zero-extension instead of sign-extension. */
373
374void
375convert_move (to, from, unsignedp)
376 register rtx to, from;
377 int unsignedp;
378{
379 enum machine_mode to_mode = GET_MODE (to);
380 enum machine_mode from_mode = GET_MODE (from);
381 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
382 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
383 enum insn_code code;
384 rtx libcall;
385
386 /* rtx code for making an equivalent value. */
387 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
388
389 to = protect_from_queue (to, 1);
390 from = protect_from_queue (from, 0);
391
392 if (to_real != from_real)
393 abort ();
394
395 if (to_mode == from_mode
396 || (from_mode == VOIDmode && CONSTANT_P (from)))
397 {
398 emit_move_insn (to, from);
399 return;
400 }
401
402 if (to_real)
403 {
404#ifdef HAVE_extendsfdf2
405 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
406 {
407 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
408 return;
409 }
410#endif
411#ifdef HAVE_extendsfxf2
412 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
413 {
414 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
415 return;
416 }
417#endif
418#ifdef HAVE_extendsftf2
419 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
420 {
421 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
422 return;
423 }
424#endif
425#ifdef HAVE_extenddfxf2
426 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
427 {
428 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
429 return;
430 }
431#endif
432#ifdef HAVE_extenddftf2
433 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
434 {
435 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
436 return;
437 }
438#endif
439#ifdef HAVE_truncdfsf2
440 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
441 {
442 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
443 return;
444 }
445#endif
446#ifdef HAVE_truncxfsf2
447 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
448 {
449 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
450 return;
451 }
452#endif
453#ifdef HAVE_trunctfsf2
454 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
455 {
456 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
457 return;
458 }
459#endif
460#ifdef HAVE_truncxfdf2
461 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
462 {
463 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
464 return;
465 }
466#endif
467#ifdef HAVE_trunctfdf2
468 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
469 {
470 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
471 return;
472 }
473#endif
474
475 libcall = (rtx) 0;
476 switch (from_mode)
477 {
478 case SFmode:
479 switch (to_mode)
480 {
481 case DFmode:
482 libcall = extendsfdf2_libfunc;
483 break;
484
485 case XFmode:
486 libcall = extendsfxf2_libfunc;
487 break;
488
489 case TFmode:
490 libcall = extendsftf2_libfunc;
491 break;
492 }
493 break;
494
495 case DFmode:
496 switch (to_mode)
497 {
498 case SFmode:
499 libcall = truncdfsf2_libfunc;
500 break;
501
502 case XFmode:
503 libcall = extenddfxf2_libfunc;
504 break;
505
506 case TFmode:
507 libcall = extenddftf2_libfunc;
508 break;
509 }
510 break;
511
512 case XFmode:
513 switch (to_mode)
514 {
515 case SFmode:
516 libcall = truncxfsf2_libfunc;
517 break;
518
519 case DFmode:
520 libcall = truncxfdf2_libfunc;
521 break;
522 }
523 break;
524
525 case TFmode:
526 switch (to_mode)
527 {
528 case SFmode:
529 libcall = trunctfsf2_libfunc;
530 break;
531
532 case DFmode:
533 libcall = trunctfdf2_libfunc;
534 break;
535 }
536 break;
537 }
538
539 if (libcall == (rtx) 0)
540 /* This conversion is not implemented yet. */
541 abort ();
542
543 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
544 emit_move_insn (to, hard_libcall_value (to_mode));
545 return;
546 }
547
548 /* Now both modes are integers. */
549
550 /* Handle expanding beyond a word. */
551 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
552 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
553 {
554 rtx insns;
555 rtx lowpart;
556 rtx fill_value;
557 rtx lowfrom;
558 int i;
559 enum machine_mode lowpart_mode;
560 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
561
562 /* Try converting directly if the insn is supported. */
563 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
564 != CODE_FOR_nothing)
565 {
566 emit_unop_insn (code, to, from, equiv_code);
567 return;
568 }
569 /* Next, try converting via full word. */
570 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
571 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
572 != CODE_FOR_nothing))
573 {
574 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
575 emit_unop_insn (code, to,
576 gen_lowpart (word_mode, to), equiv_code);
577 return;
578 }
579
580 /* No special multiword conversion insn; do it by hand. */
581 start_sequence ();
582
583 /* Get a copy of FROM widened to a word, if necessary. */
584 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
585 lowpart_mode = word_mode;
586 else
587 lowpart_mode = from_mode;
588
589 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
590
591 lowpart = gen_lowpart (lowpart_mode, to);
592 emit_move_insn (lowpart, lowfrom);
593
594 /* Compute the value to put in each remaining word. */
595 if (unsignedp)
596 fill_value = const0_rtx;
597 else
598 {
599#ifdef HAVE_slt
600 if (HAVE_slt
601 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
602 && STORE_FLAG_VALUE == -1)
603 {
604 emit_cmp_insn (lowfrom, const0_rtx, NE, 0, lowpart_mode, 0, 0);
605 fill_value = gen_reg_rtx (word_mode);
606 emit_insn (gen_slt (fill_value));
607 }
608 else
609#endif
610 {
611 fill_value
612 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
613 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
614 0, 0);
615 fill_value = convert_to_mode (word_mode, fill_value, 1);
616 }
617 }
618
619 /* Fill the remaining words. */
620 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
621 {
622 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
623 rtx subword = operand_subword (to, index, 1, to_mode);
624
625 if (subword == 0)
626 abort ();
627
628 if (fill_value != subword)
629 emit_move_insn (subword, fill_value);
630 }
631
632 insns = get_insns ();
633 end_sequence ();
634
635 emit_no_conflict_block (insns, to, from, 0,
636 gen_rtx (equiv_code, to_mode, from));
637 return;
638 }
639
640 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
641 {
642 convert_move (to, gen_lowpart (word_mode, from), 0);
643 return;
644 }
645
646 /* Handle pointer conversion. */ /* SPEE 900220 */
647 if (to_mode == PSImode)
648 {
649 if (from_mode != SImode)
650 from = convert_to_mode (SImode, from, unsignedp);
651
652#ifdef HAVE_truncsipsi
653 if (HAVE_truncsipsi)
654 {
655 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
656 return;
657 }
658#endif /* HAVE_truncsipsi */
659 abort ();
660 }
661
662 if (from_mode == PSImode)
663 {
664 if (to_mode != SImode)
665 {
666 from = convert_to_mode (SImode, from, unsignedp);
667 from_mode = SImode;
668 }
669 else
670 {
671#ifdef HAVE_extendpsisi
672 if (HAVE_extendpsisi)
673 {
674 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
675 return;
676 }
677#endif /* HAVE_extendpsisi */
678 abort ();
679 }
680 }
681
682 /* Now follow all the conversions between integers
683 no more than a word long. */
684
685 /* For truncation, usually we can just refer to FROM in a narrower mode. */
686 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
687 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
688 GET_MODE_BITSIZE (from_mode))
689 && ((GET_CODE (from) == MEM
690 && ! MEM_VOLATILE_P (from)
691 && direct_load[(int) to_mode]
692 && ! mode_dependent_address_p (XEXP (from, 0)))
693 || GET_CODE (from) == REG
694 || GET_CODE (from) == SUBREG))
695 {
696 emit_move_insn (to, gen_lowpart (to_mode, from));
697 return;
698 }
699
700 /* Handle extension, where TO is wider than FROM. */
701 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
702 {
703 /* Convert directly if that works. */
704 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
705 != CODE_FOR_nothing)
706 {
707 emit_unop_insn (code, to, from, equiv_code);
708 return;
709 }
710 else
711 {
712 enum machine_mode intermediate;
713
714 /* Search for a mode to convert via. */
715 for (intermediate = from_mode; intermediate != VOIDmode;
716 intermediate = GET_MODE_WIDER_MODE (intermediate))
717 if ((can_extend_p (to_mode, intermediate, unsignedp)
718 != CODE_FOR_nothing)
719 && (can_extend_p (intermediate, from_mode, unsignedp)
720 != CODE_FOR_nothing))
721 {
722 convert_move (to, convert_to_mode (intermediate, from,
723 unsignedp), unsignedp);
724 return;
725 }
726
727 /* No suitable intermediate mode. */
728 abort ();
729 }
730 }
731
732 /* Support special truncate insns for certain modes. */
733
734 if (from_mode == DImode && to_mode == SImode)
735 {
736#ifdef HAVE_truncdisi2
737 if (HAVE_truncdisi2)
738 {
739 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
740 return;
741 }
742#endif
743 convert_move (to, force_reg (from_mode, from), unsignedp);
744 return;
745 }
746
747 if (from_mode == DImode && to_mode == HImode)
748 {
749#ifdef HAVE_truncdihi2
750 if (HAVE_truncdihi2)
751 {
752 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
753 return;
754 }
755#endif
756 convert_move (to, force_reg (from_mode, from), unsignedp);
757 return;
758 }
759
760 if (from_mode == DImode && to_mode == QImode)
761 {
762#ifdef HAVE_truncdiqi2
763 if (HAVE_truncdiqi2)
764 {
765 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
766 return;
767 }
768#endif
769 convert_move (to, force_reg (from_mode, from), unsignedp);
770 return;
771 }
772
773 if (from_mode == SImode && to_mode == HImode)
774 {
775#ifdef HAVE_truncsihi2
776 if (HAVE_truncsihi2)
777 {
778 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
779 return;
780 }
781#endif
782 convert_move (to, force_reg (from_mode, from), unsignedp);
783 return;
784 }
785
786 if (from_mode == SImode && to_mode == QImode)
787 {
788#ifdef HAVE_truncsiqi2
789 if (HAVE_truncsiqi2)
790 {
791 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
792 return;
793 }
794#endif
795 convert_move (to, force_reg (from_mode, from), unsignedp);
796 return;
797 }
798
799 if (from_mode == HImode && to_mode == QImode)
800 {
801#ifdef HAVE_trunchiqi2
802 if (HAVE_trunchiqi2)
803 {
804 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
805 return;
806 }
807#endif
808 convert_move (to, force_reg (from_mode, from), unsignedp);
809 return;
810 }
811
812 /* Handle truncation of volatile memrefs, and so on;
813 the things that couldn't be truncated directly,
814 and for which there was no special instruction. */
815 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
816 {
817 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
818 emit_move_insn (to, temp);
819 return;
820 }
821
822 /* Mode combination is not recognized. */
823 abort ();
824}
825
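/* Editorial example, not in the original source: the UNSIGNEDP flag
   selects between the two integer extensions.  With QI_REG standing for
   a hypothetical QImode register:

     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, qi_reg, 0);    (sign-extend)
     convert_move (wide, qi_reg, 1);    (zero-extend)  */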
826/* Return an rtx for a value that would result
827 from converting X to mode MODE.
828 Both X and MODE may be floating, or both integer.
829 UNSIGNEDP is nonzero if X is an unsigned value.
830 This can be done by referring to a part of X in place
831 or by copying to a new temporary with conversion. */
832
833rtx
834convert_to_mode (mode, x, unsignedp)
835 enum machine_mode mode;
836 rtx x;
837 int unsignedp;
838{
839 register rtx temp;
840
841 x = protect_from_queue (x, 0);
842
843 if (mode == GET_MODE (x))
844 return x;
845
846 /* There is one case that we must handle specially: If we are converting
847 a CONST_INT into a mode whose size is twice HOST_BITS_PER_INT and
848 we are to interpret the constant as unsigned, gen_lowpart will do
849 the wrong thing if the constant appears negative. What we want to do is
850 make the high-order word of the constant zero, not all ones. */
851
852 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
853 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_INT
854 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
855 return immed_double_const (INTVAL (x), 0, mode);
856
857 /* We can do this with a gen_lowpart if both desired and current modes
858 are integer, and this is either a constant integer, a register, or a
859 non-volatile MEM. Except for the constant case, we must be narrowing
860 the operand. */
861
862 if (GET_CODE (x) == CONST_INT
863 || (GET_MODE_CLASS (mode) == MODE_INT
864 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
865 && (GET_CODE (x) == CONST_DOUBLE
866 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
867 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
868 && direct_load[(int) mode]
869 || GET_CODE (x) == REG)))))
870 return gen_lowpart (mode, x);
871
872 temp = gen_reg_rtx (mode);
873 convert_move (temp, x, unsignedp);
874 return temp;
875}
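/* Editorial example, not in the original source: unlike convert_move,
   convert_to_mode may emit no insn at all.  Narrowing a hypothetical
   SImode register SI_REG,

     rtx narrow = convert_to_mode (HImode, si_reg, 0);

   simply returns gen_lowpart (HImode, si_reg); widening allocates a new
   pseudo and converts into it via convert_move.  */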
876\f
877/* Generate several move instructions to copy LEN bytes
878 from block FROM to block TO. (These are MEM rtx's with BLKmode).
879 The caller must pass FROM and TO
880 through protect_from_queue before calling.
881 ALIGN (in bytes) is maximum alignment we can assume. */
882
883struct move_by_pieces
884{
885 rtx to;
886 rtx to_addr;
887 int autinc_to;
888 int explicit_inc_to;
889 rtx from;
890 rtx from_addr;
891 int autinc_from;
892 int explicit_inc_from;
893 int len;
894 int offset;
895 int reverse;
896};
897
898static void move_by_pieces_1 ();
899static int move_by_pieces_ninsns ();
900
901static void
902move_by_pieces (to, from, len, align)
903 rtx to, from;
904 int len, align;
905{
906 struct move_by_pieces data;
907 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
908 int max_size = MOVE_MAX + 1;
909
910 data.offset = 0;
911 data.to_addr = to_addr;
912 data.from_addr = from_addr;
913 data.to = to;
914 data.from = from;
915 data.autinc_to
916 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
917 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
918 data.autinc_from
919 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
920 || GET_CODE (from_addr) == POST_INC
921 || GET_CODE (from_addr) == POST_DEC);
922
923 data.explicit_inc_from = 0;
924 data.explicit_inc_to = 0;
925 data.reverse
926 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
927 if (data.reverse) data.offset = len;
928 data.len = len;
929
930 /* If copying requires more than two move insns,
931 copy addresses to registers (to make displacements shorter)
932 and use post-increment if available. */
933 if (!(data.autinc_from && data.autinc_to)
934 && move_by_pieces_ninsns (len, align) > 2)
935 {
936#ifdef HAVE_PRE_DECREMENT
937 if (data.reverse && ! data.autinc_from)
938 {
939 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
940 data.autinc_from = 1;
941 data.explicit_inc_from = -1;
942 }
943#endif
944#ifdef HAVE_POST_INCREMENT
945 if (! data.autinc_from)
946 {
947 data.from_addr = copy_addr_to_reg (from_addr);
948 data.autinc_from = 1;
949 data.explicit_inc_from = 1;
950 }
951#endif
952 if (!data.autinc_from && CONSTANT_P (from_addr))
953 data.from_addr = copy_addr_to_reg (from_addr);
954#ifdef HAVE_PRE_DECREMENT
955 if (data.reverse && ! data.autinc_to)
956 {
957 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
958 data.autinc_to = 1;
959 data.explicit_inc_to = -1;
960 }
961#endif
962#ifdef HAVE_POST_INCREMENT
963 if (! data.reverse && ! data.autinc_to)
964 {
965 data.to_addr = copy_addr_to_reg (to_addr);
966 data.autinc_to = 1;
967 data.explicit_inc_to = 1;
968 }
969#endif
970 if (!data.autinc_to && CONSTANT_P (to_addr))
971 data.to_addr = copy_addr_to_reg (to_addr);
972 }
973
974 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
975 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
976 align = MOVE_MAX;
977
978 /* First move what we can in the largest integer mode, then go to
979 successively smaller modes. */
980
981 while (max_size > 1)
982 {
983 enum machine_mode mode = VOIDmode, tmode;
984 enum insn_code icode;
985
986 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
987 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
988 if (GET_MODE_SIZE (tmode) < max_size)
989 mode = tmode;
990
991 if (mode == VOIDmode)
992 break;
993
994 icode = mov_optab->handlers[(int) mode].insn_code;
995 if (icode != CODE_FOR_nothing
996 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
997 GET_MODE_SIZE (mode)))
998 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
999
1000 max_size = GET_MODE_SIZE (mode);
1001 }
1002
1003 /* The code above should have handled everything. */
1004 if (data.len != 0)
1005 abort ();
1006}
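/* Editorial worked example, not in the original source: for len == 7 and
   align == 4 on a target with QImode, HImode and SImode moves, the loop
   above emits one SImode, one HImode, and one QImode move (7 = 4 + 2 + 1),
   which is exactly the count move_by_pieces_ninsns below computes for the
   same arguments.  */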
1007
1008/* Return number of insns required to move L bytes by pieces.
1009 ALIGN (in bytes) is maximum alignment we can assume. */
1010
1011static int
1012move_by_pieces_ninsns (l, align)
1013 unsigned int l;
1014 int align;
1015{
1016 register int n_insns = 0;
1017 int max_size = MOVE_MAX + 1;
1018
1019 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1020 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1021 align = MOVE_MAX;
1022
1023 while (max_size > 1)
1024 {
1025 enum machine_mode mode = VOIDmode, tmode;
1026 enum insn_code icode;
1027
1028 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1029 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1030 if (GET_MODE_SIZE (tmode) < max_size)
1031 mode = tmode;
1032
1033 if (mode == VOIDmode)
1034 break;
1035
1036 icode = mov_optab->handlers[(int) mode].insn_code;
1037 if (icode != CODE_FOR_nothing
1038 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1039 GET_MODE_SIZE (mode)))
1040 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1041
1042 max_size = GET_MODE_SIZE (mode);
1043 }
1044
1045 return n_insns;
1046}
1047
1048/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1049 with move instructions for mode MODE. GENFUN is the gen_... function
1050 to make a move insn for that mode. DATA has all the other info. */
1051
1052static void
1053move_by_pieces_1 (genfun, mode, data)
1054 rtx (*genfun) ();
1055 enum machine_mode mode;
1056 struct move_by_pieces *data;
1057{
1058 register int size = GET_MODE_SIZE (mode);
1059 register rtx to1, from1;
1060
1061 while (data->len >= size)
1062 {
1063 if (data->reverse) data->offset -= size;
1064
1065 to1 = (data->autinc_to
1066 ? gen_rtx (MEM, mode, data->to_addr)
1067 : change_address (data->to, mode,
1068 plus_constant (data->to_addr, data->offset)));
1069 from1 =
1070 (data->autinc_from
1071 ? gen_rtx (MEM, mode, data->from_addr)
1072 : change_address (data->from, mode,
1073 plus_constant (data->from_addr, data->offset)));
1074
1075#ifdef HAVE_PRE_DECREMENT
1076 if (data->explicit_inc_to < 0)
1077 emit_insn (gen_add2_insn (data->to_addr,
1078 gen_rtx (CONST_INT, VOIDmode, -size)));
1079 if (data->explicit_inc_from < 0)
1080 emit_insn (gen_add2_insn (data->from_addr,
1081 gen_rtx (CONST_INT, VOIDmode, -size)));
1082#endif
1083
1084 emit_insn ((*genfun) (to1, from1));
1085#ifdef HAVE_POST_INCREMENT
1086 if (data->explicit_inc_to > 0)
1087 emit_insn (gen_add2_insn (data->to_addr,
1088 gen_rtx (CONST_INT, VOIDmode, size)));
1089 if (data->explicit_inc_from > 0)
1090 emit_insn (gen_add2_insn (data->from_addr,
1091 gen_rtx (CONST_INT, VOIDmode, size)));
1092#endif
1093
1094 if (! data->reverse) data->offset += size;
1095
1096 data->len -= size;
1097 }
1098}
1099\f
1100/* Emit code to move a block Y to a block X.
1101 This may be done with string-move instructions,
1102 with multiple scalar move instructions, or with a library call.
1103
1104 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1105 with mode BLKmode.
1106 SIZE is an rtx that says how long they are.
1107 ALIGN is the maximum alignment we can assume they have,
1108 measured in bytes. */
1109
1110void
1111emit_block_move (x, y, size, align)
1112 rtx x, y;
1113 rtx size;
1114 int align;
1115{
1116 if (GET_MODE (x) != BLKmode)
1117 abort ();
1118
1119 if (GET_MODE (y) != BLKmode)
1120 abort ();
1121
1122 x = protect_from_queue (x, 1);
1123 y = protect_from_queue (y, 0);
1124
1125 if (GET_CODE (x) != MEM)
1126 abort ();
1127 if (GET_CODE (y) != MEM)
1128 abort ();
1129 if (size == 0)
1130 abort ();
1131
1132 if (GET_CODE (size) == CONST_INT
1133 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1134 < MOVE_RATIO))
1135 move_by_pieces (x, y, INTVAL (size), align);
1136 else
1137 {
1138 /* Try the most limited insn first, because there's no point
1139 including more than one in the machine description unless
1140 the more limited one has some advantage. */
1141#ifdef HAVE_movstrqi
1142 if (HAVE_movstrqi
1143 && GET_CODE (size) == CONST_INT
1144 && ((unsigned) INTVAL (size)
1145 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1146 {
1147 rtx insn = gen_movstrqi (x, y, size,
1148 gen_rtx (CONST_INT, VOIDmode, align));
1149 if (insn)
1150 {
1151 emit_insn (insn);
1152 return;
1153 }
1154 }
1155#endif
1156#ifdef HAVE_movstrhi
1157 if (HAVE_movstrhi
1158 && GET_CODE (size) == CONST_INT
1159 && ((unsigned) INTVAL (size)
1160 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1161 {
1162 rtx insn = gen_movstrhi (x, y, size,
1163 gen_rtx (CONST_INT, VOIDmode, align));
1164 if (insn)
1165 {
1166 emit_insn (insn);
1167 return;
1168 }
1169 }
1170#endif
1171#ifdef HAVE_movstrsi
1172 if (HAVE_movstrsi)
1173 {
1174 rtx insn = gen_movstrsi (x, y, size,
1175 gen_rtx (CONST_INT, VOIDmode, align));
1176 if (insn)
1177 {
1178 emit_insn (insn);
1179 return;
1180 }
1181 }
1182#endif
1183#ifdef HAVE_movstrdi
1184 if (HAVE_movstrdi)
1185 {
1186 rtx insn = gen_movstrdi (x, y, size,
1187 gen_rtx (CONST_INT, VOIDmode, align));
1188 if (insn)
1189 {
1190 emit_insn (insn);
1191 return;
1192 }
1193 }
1194#endif
1195
1196#ifdef TARGET_MEM_FUNCTIONS
1197 emit_library_call (memcpy_libfunc, 1,
1198 VOIDmode, 3, XEXP (x, 0), Pmode,
1199 XEXP (y, 0), Pmode,
1200 convert_to_mode (Pmode, size, 1), Pmode);
1201#else
1202 emit_library_call (bcopy_libfunc, 1,
1203 VOIDmode, 3, XEXP (y, 0), Pmode,
1204 XEXP (x, 0), Pmode,
1205 convert_to_mode (Pmode, size, 1), Pmode);
1206#endif
1207 }
1208}
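/* Editorial usage sketch, not in the original source: copying a 16-byte
   BLKmode object, with X and Y MEM rtx's already built by the caller:

     emit_block_move (x, y, gen_rtx (CONST_INT, VOIDmode, 16), 4);

   A constant size under the MOVE_RATIO threshold goes through
   move_by_pieces; anything else tries the movstr patterns and finally
   the memcpy/bcopy libcall.  */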
1209\f
1210/* Copy all or part of a value X into registers starting at REGNO.
1211 The number of registers to be filled is NREGS. */
1212
1213void
1214move_block_to_reg (regno, x, nregs, mode)
1215 int regno;
1216 rtx x;
1217 int nregs;
1218 enum machine_mode mode;
1219{
1220 int i;
1221 rtx pat, last;
1222
1223 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1224 x = validize_mem (force_const_mem (mode, x));
1225
1226 /* See if the machine can do this with a load multiple insn. */
1227#ifdef HAVE_load_multiple
1228 last = get_last_insn ();
1229 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1230 gen_rtx (CONST_INT, VOIDmode, nregs));
1231 if (pat)
1232 {
1233 emit_insn (pat);
1234 return;
1235 }
1236 else
1237 delete_insns_since (last);
1238#endif
1239
1240 for (i = 0; i < nregs; i++)
1241 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1242 operand_subword_force (x, i, mode));
1243}
1244
1245/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1246 The number of registers to be filled is NREGS. */
1247
1248void
1249move_block_from_reg (regno, x, nregs)
1250 int regno;
1251 rtx x;
1252 int nregs;
1253{
1254 int i;
1255 rtx pat, last;
1256
1257 /* See if the machine can do this with a store multiple insn. */
1258#ifdef HAVE_store_multiple
1259 last = get_last_insn ();
1260 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1261 gen_rtx (CONST_INT, VOIDmode, nregs));
1262 if (pat)
1263 {
1264 emit_insn (pat);
1265 return;
1266 }
1267 else
1268 delete_insns_since (last);
1269#endif
1270
1271 for (i = 0; i < nregs; i++)
1272 {
1273 rtx tem = operand_subword (x, i, 1, BLKmode);
1274
1275 if (tem == 0)
1276 abort ();
1277
1278 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1279 }
1280}
1281
1282/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1283
1284void
1285use_regs (regno, nregs)
1286 int regno;
1287 int nregs;
1288{
1289 int i;
1290
1291 for (i = 0; i < nregs; i++)
1292 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1293}
1294\f
1295/* Write zeros through the storage of OBJECT.
1296 If OBJECT has BLKmode, SIZE is its length in bytes. */
1297
1298void
1299clear_storage (object, size)
1300 rtx object;
1301 int size;
1302{
1303 if (GET_MODE (object) == BLKmode)
1304 {
1305#ifdef TARGET_MEM_FUNCTIONS
1306 emit_library_call (memset_libfunc, 1,
1307 VOIDmode, 3,
1308 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1309 gen_rtx (CONST_INT, VOIDmode, size), Pmode);
1310#else
1311 emit_library_call (bzero_libfunc, 1,
1312 VOIDmode, 2,
1313 XEXP (object, 0), Pmode,
1314 gen_rtx (CONST_INT, VOIDmode, size), Pmode);
1315#endif
1316 }
1317 else
1318 emit_move_insn (object, const0_rtx);
1319}
1320
1321/* Generate code to copy Y into X.
1322 Both Y and X must have the same mode, except that
1323 Y can be a constant with VOIDmode.
1324 This mode cannot be BLKmode; use emit_block_move for that.
1325
1326 Return the last instruction emitted. */
1327
1328rtx
1329emit_move_insn (x, y)
1330 rtx x, y;
1331{
1332 enum machine_mode mode = GET_MODE (x);
1333 int i;
1334
1335 x = protect_from_queue (x, 1);
1336 y = protect_from_queue (y, 0);
1337
1338 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1339 abort ();
1340
1341 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1342 y = force_const_mem (mode, y);
1343
1344 /* If X or Y are memory references, verify that their addresses are valid
1345 for the machine. */
1346 if (GET_CODE (x) == MEM
1347 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1348 && ! push_operand (x, GET_MODE (x)))
1349 || (flag_force_addr
1350 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1351 x = change_address (x, VOIDmode, XEXP (x, 0));
1352
1353 if (GET_CODE (y) == MEM
1354 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1355 || (flag_force_addr
1356 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1357 y = change_address (y, VOIDmode, XEXP (y, 0));
1358
1359 if (mode == BLKmode)
1360 abort ();
1361
1362 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1363 return
1364 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1365
1366 /* This will handle any multi-word mode that lacks a move_insn pattern.
1367 However, you will get better code if you define such patterns,
1368 even if they must turn into multiple assembler instructions. */
1369 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
1370 {
1371 rtx last_insn = 0;
1372
1373 for (i = 0;
1374 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1375 i++)
1376 {
1377 rtx xpart = operand_subword (x, i, 1, mode);
1378 rtx ypart = operand_subword (y, i, 1, mode);
1379
1380 /* If we can't get a part of Y, put Y into memory if it is a
1381 constant. Otherwise, force it into a register. If we still
1382 can't get a part of Y, abort. */
1383 if (ypart == 0 && CONSTANT_P (y))
1384 {
1385 y = force_const_mem (mode, y);
1386 ypart = operand_subword (y, i, 1, mode);
1387 }
1388 else if (ypart == 0)
1389 ypart = operand_subword_force (y, i, mode);
1390
1391 if (xpart == 0 || ypart == 0)
1392 abort ();
1393
1394 last_insn = emit_move_insn (xpart, ypart);
1395 }
1396 return last_insn;
1397 }
1398 else
1399 abort ();
1400}
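/* Editorial example, not in the original source: given hypothetical
   DImode rtx's DI_DEST and DI_SRC on a 32-bit target with no movdi
   pattern,

     emit_move_insn (di_dest, di_src);

   falls into the multiword loop above and emits two SImode moves, one
   per operand_subword.  */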
1401\f
1402/* Pushing data onto the stack. */
1403
1404/* Push a block of length SIZE (perhaps variable)
1405 and return an rtx to address the beginning of the block.
1406 Note that it is not possible for the value returned to be a QUEUED.
1407 The value may be virtual_outgoing_args_rtx.
1408
1409 EXTRA is the number of bytes of padding to push in addition to SIZE.
1410 BELOW nonzero means this padding comes at low addresses;
1411 otherwise, the padding comes at high addresses. */
1412
1413rtx
1414push_block (size, extra, below)
1415 rtx size;
1416 int extra, below;
1417{
1418 register rtx temp;
1419 if (CONSTANT_P (size))
1420 anti_adjust_stack (plus_constant (size, extra));
1421 else if (GET_CODE (size) == REG && extra == 0)
1422 anti_adjust_stack (size);
1423 else
1424 {
1425 rtx temp = copy_to_mode_reg (Pmode, size);
1426 if (extra != 0)
1427 temp = expand_binop (Pmode, add_optab,
1428 temp,
1429 gen_rtx (CONST_INT, VOIDmode, extra),
1430 temp, 0, OPTAB_LIB_WIDEN);
1431 anti_adjust_stack (temp);
1432 }
1433
1434#ifdef STACK_GROWS_DOWNWARD
1435 temp = virtual_outgoing_args_rtx;
1436 if (extra != 0 && below)
1437 temp = plus_constant (temp, extra);
1438#else
1439 if (GET_CODE (size) == CONST_INT)
1440 temp = plus_constant (virtual_outgoing_args_rtx,
1441 - INTVAL (size) - (below ? 0 : extra));
1442 else if (extra != 0 && !below)
1443 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1444 negate_rtx (Pmode, plus_constant (size, extra)));
1445 else
1446 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1447 negate_rtx (Pmode, size));
1448#endif
1449
1450 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1451}
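/* Editorial example, not in the original source: on a machine where
   STACK_GROWS_DOWNWARD,

     rtx addr = push_block (gen_rtx (CONST_INT, VOIDmode, 32), 0, 0);

   emits an anti_adjust_stack of 32 bytes and returns an address for the
   new block based on virtual_outgoing_args_rtx.  */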
1452
1453static rtx
1454gen_push_operand ()
1455{
1456 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1457}
1458
1459/* Generate code to push X onto the stack, assuming it has mode MODE and
1460 type TYPE.
1461 MODE is redundant except when X is a CONST_INT (since they don't
1462 carry mode info).
1463 SIZE is an rtx for the size of data to be copied (in bytes),
1464 needed only if X is BLKmode.
1465
1466 ALIGN (in bytes) is maximum alignment we can assume.
1467
1468 If PARTIAL is nonzero, then copy that many of the first words
1469 of X into registers starting with REG, and push the rest of X.
1470 The amount of space pushed is decreased by PARTIAL words,
1471 rounded *down* to a multiple of PARM_BOUNDARY.
1472 REG must be a hard register in this case.
1473
1474 EXTRA is the amount in bytes of extra space to leave next to this arg.
1475 This is ignored if an argument block has already been allocated.
1476
1477 On a machine that lacks real push insns, ARGS_ADDR is the address of
1478 the bottom of the argument block for this call. We use indexing off there
1479 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1480 argument block has not been preallocated.
1481
1482 ARGS_SO_FAR is the size of args previously pushed for this call. */
1483
1484void
1485emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1486 args_addr, args_so_far)
1487 register rtx x;
1488 enum machine_mode mode;
1489 tree type;
1490 rtx size;
1491 int align;
1492 int partial;
1493 rtx reg;
1494 int extra;
1495 rtx args_addr;
1496 rtx args_so_far;
1497{
1498 rtx xinner;
1499 enum direction stack_direction
1500#ifdef STACK_GROWS_DOWNWARD
1501 = downward;
1502#else
1503 = upward;
1504#endif
1505
1506 /* Decide where to pad the argument: `downward' for below,
1507 `upward' for above, or `none' for don't pad it.
1508 Default is below for small data on big-endian machines; else above. */
1509 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1510
1511 /* Invert direction if stack is post-update. */
1512 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1513 if (where_pad != none)
1514 where_pad = (where_pad == downward ? upward : downward);
1515
1516 xinner = x = protect_from_queue (x, 0);
1517
1518 if (mode == BLKmode)
1519 {
1520 /* Copy a block into the stack, entirely or partially. */
1521
1522 register rtx temp;
1523 int used = partial * UNITS_PER_WORD;
1524 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1525 int skip;
1526
1527 if (size == 0)
1528 abort ();
1529
1530 used -= offset;
1531
1532 /* USED is now the # of bytes we need not copy to the stack
1533 because registers will take care of them. */
1534
1535 if (partial != 0)
1536 xinner = change_address (xinner, BLKmode,
1537 plus_constant (XEXP (xinner, 0), used));
1538
1539 /* If the partial register-part of the arg counts in its stack size,
1540 skip the part of stack space corresponding to the registers.
1541 Otherwise, start copying to the beginning of the stack space,
1542 by setting SKIP to 0. */
1543#ifndef REG_PARM_STACK_SPACE
1544 skip = 0;
1545#else
1546 skip = used;
1547#endif
1548
1549#ifdef PUSH_ROUNDING
1550 /* Do it with several push insns if that doesn't take lots of insns
1551 and if there is no difficulty with push insns that skip bytes
1552 on the stack for alignment purposes. */
1553 if (args_addr == 0
1554 && GET_CODE (size) == CONST_INT
1555 && skip == 0
1556 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1557 < MOVE_RATIO)
1558 /* Here we avoid the case of a structure whose weak alignment
1559 forces many pushes of a small amount of data,
1560 and such small pushes do rounding that causes trouble. */
1561 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1562 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1563 || PUSH_ROUNDING (align) == align)
1564 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1565 {
1566 /* Push padding now if padding above and stack grows down,
1567 or if padding below and stack grows up.
1568 But if space already allocated, this has already been done. */
1569 if (extra && args_addr == 0
1570 && where_pad != none && where_pad != stack_direction)
1571 anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
1572
1573 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1574 INTVAL (size) - used, align);
1575 }
1576 else
1577#endif /* PUSH_ROUNDING */
1578 {
1579 /* Otherwise make space on the stack and copy the data
1580 to the address of that space. */
1581
1582 /* Deduct words put into registers from the size we must copy. */
1583 if (partial != 0)
1584 {
1585 if (GET_CODE (size) == CONST_INT)
1586 size = gen_rtx (CONST_INT, VOIDmode, INTVAL (size) - used);
1587 else
1588 size = expand_binop (GET_MODE (size), sub_optab, size,
1589 gen_rtx (CONST_INT, VOIDmode, used),
1590 0, 0, OPTAB_LIB_WIDEN);
1591 }
1592
1593 /* Get the address of the stack space.
1594 In this case, we do not deal with EXTRA separately.
1595 A single stack adjust will do. */
1596 if (! args_addr)
1597 {
1598 temp = push_block (size, extra, where_pad == downward);
1599 extra = 0;
1600 }
1601 else if (GET_CODE (args_so_far) == CONST_INT)
1602 temp = memory_address (BLKmode,
1603 plus_constant (args_addr,
1604 skip + INTVAL (args_so_far)));
1605 else
1606 temp = memory_address (BLKmode,
1607 plus_constant (gen_rtx (PLUS, Pmode,
1608 args_addr, args_so_far),
1609 skip));
1610
1611 /* TEMP is the address of the block. Copy the data there. */
1612 if (GET_CODE (size) == CONST_INT
1613 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1614 < MOVE_RATIO))
1615 {
1616 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1617 INTVAL (size), align);
1618 goto ret;
1619 }
1620 /* Try the most limited insn first, because there's no point
1621 including more than one in the machine description unless
1622 the more limited one has some advantage. */
1623#ifdef HAVE_movstrqi
1624 if (HAVE_movstrqi
1625 && GET_CODE (size) == CONST_INT
1626 && ((unsigned) INTVAL (size)
1627 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1628 {
1629 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1630 xinner, size,
1631 gen_rtx (CONST_INT, VOIDmode, align)));
1632 goto ret;
1633 }
1634#endif
1635#ifdef HAVE_movstrhi
1636 if (HAVE_movstrhi
1637 && GET_CODE (size) == CONST_INT
1638 && ((unsigned) INTVAL (size)
1639 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1640 {
1641 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1642 xinner, size,
1643 gen_rtx (CONST_INT, VOIDmode, align)));
1644 goto ret;
1645 }
1646#endif
1647#ifdef HAVE_movstrsi
1648 if (HAVE_movstrsi)
1649 {
1650 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1651 xinner, size,
1652 gen_rtx (CONST_INT, VOIDmode, align)));
1653 goto ret;
1654 }
1655#endif
1656#ifdef HAVE_movstrdi
1657 if (HAVE_movstrdi)
1658 {
1659 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1660 xinner, size,
1661 gen_rtx (CONST_INT, VOIDmode, align)));
1662 goto ret;
1663 }
1664#endif
1665
1666#ifndef ACCUMULATE_OUTGOING_ARGS
1667 /* If the source is referenced relative to the stack pointer,
1668 copy it to another register to stabilize it. We do not need
1669 to do this if we know that we won't be changing sp. */
1670
1671 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1672 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1673 temp = copy_to_reg (temp);
1674#endif
1675
1676 /* Make inhibit_defer_pop nonzero around the library call
1677 to force it to pop the bcopy-arguments right away. */
1678 NO_DEFER_POP;
1679#ifdef TARGET_MEM_FUNCTIONS
1680 emit_library_call (memcpy_libfunc, 1,
1681 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1682 size, Pmode);
1683#else
1684 emit_library_call (bcopy_libfunc, 1,
1685 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1686 size, Pmode);
1687#endif
1688 OK_DEFER_POP;
1689 }
1690 }
1691 else if (partial > 0)
1692 {
1693 /* Scalar partly in registers. */
1694
1695 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1696 int i;
1697 int not_stack;
1698 /* # words of start of argument
1699 that we must make space for but need not store. */
1700 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1701 int args_offset = INTVAL (args_so_far);
1702 int skip;
1703
1704 /* Push padding now if padding above and stack grows down,
1705 or if padding below and stack grows up.
1706 But if space already allocated, this has already been done. */
1707 if (extra && args_addr == 0
1708 && where_pad != none && where_pad != stack_direction)
1709 anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
1710
1711 /* If we make space by pushing it, we might as well push
1712 the real data. Otherwise, we can leave OFFSET nonzero
1713 and leave the space uninitialized. */
1714 if (args_addr == 0)
1715 offset = 0;
1716
1717 /* Now NOT_STACK gets the number of words that we don't need to
1718 allocate on the stack. */
1719 not_stack = partial - offset;
1720
1721 /* If the partial register-part of the arg counts in its stack size,
1722 skip the part of stack space corresponding to the registers.
1723 Otherwise, start copying to the beginning of the stack space,
1724 by setting SKIP to 0. */
1725#ifndef REG_PARM_STACK_SPACE
1726 skip = 0;
1727#else
1728 skip = not_stack;
1729#endif
1730
1731 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1732 x = validize_mem (force_const_mem (mode, x));
1733
1734 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1735 SUBREGs of such registers are not allowed. */
1736 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1737 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1738 x = copy_to_reg (x);
1739
1740 /* Loop over all the words allocated on the stack for this arg. */
1741 /* We can do it by words, because any scalar bigger than a word
1742 has a size that is a multiple of a word. */
1743#ifndef PUSH_ARGS_REVERSED
1744 for (i = not_stack; i < size; i++)
1745#else
1746 for (i = size - 1; i >= not_stack; i--)
1747#endif
1748 if (i >= not_stack + offset)
1749 emit_push_insn (operand_subword_force (x, i, mode),
1750 word_mode, 0, 0, align, 0, 0, 0, args_addr,
1751 gen_rtx (CONST_INT, VOIDmode,
1752 args_offset + ((i - not_stack + skip)
1753 * UNITS_PER_WORD)));
1754 }
1755 else
1756 {
1757 rtx addr;
1758
1759 /* Push padding now if padding above and stack grows down,
1760 or if padding below and stack grows up.
1761 But if space already allocated, this has already been done. */
1762 if (extra && args_addr == 0
1763 && where_pad != none && where_pad != stack_direction)
1764 anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
1765
1766#ifdef PUSH_ROUNDING
1767 if (args_addr == 0)
1768 addr = gen_push_operand ();
1769 else
1770#endif
1771 if (GET_CODE (args_so_far) == CONST_INT)
1772 addr
1773 = memory_address (mode,
1774 plus_constant (args_addr, INTVAL (args_so_far)));
1775 else
1776 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1777 args_so_far));
1778
1779 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1780 }
1781
1782 ret:
1783 /* If part should go in registers, copy that part
1784 into the appropriate registers. Do this now, at the end,
1785 since mem-to-mem copies above may do function calls. */
1786 if (partial > 0)
1787 move_block_to_reg (REGNO (reg), x, partial, mode);
1788
1789 if (extra && args_addr == 0 && where_pad == stack_direction)
1790 anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
1791}
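/* Editorial usage sketch, assuming a machine with PUSH_ROUNDING: pushing
   a word-sized scalar VAL entirely on the stack (no register part, no
   preallocated argument block) looks like

     emit_push_insn (val, SImode, integer_type_node, 0, 4, 0, 0, 0,
                     0, const0_rtx);

   which reaches the final else-arm above and emits one move through
   gen_push_operand.  */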
1792\f
1793/* Output a library call to function FUN (a SYMBOL_REF rtx)
1794 (emitting the queue unless NO_QUEUE is nonzero),
1795 for a value of mode OUTMODE,
1796 with NARGS different arguments, passed as alternating rtx values
1797 and machine_modes to convert them to.
1798 The rtx values should have been passed through protect_from_queue already.
1799
1800 NO_QUEUE will be true if and only if the library call is a `const' call
1801 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1802 to the variable is_const in expand_call. */
1803
1804void
1805emit_library_call (va_alist)
1806 va_dcl
1807{
1808 va_list p;
1809 struct args_size args_size;
1810 register int argnum;
1811 enum machine_mode outmode;
1812 int nargs;
1813 rtx fun;
1814 rtx orgfun;
1815 int inc;
1816 int count;
1817 rtx argblock = 0;
1818 CUMULATIVE_ARGS args_so_far;
1819 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1820 struct args_size offset; struct args_size size; };
1821 struct arg *argvec;
1822 int old_inhibit_defer_pop = inhibit_defer_pop;
1823 int no_queue = 0;
1824 rtx use_insns;
1825
1826 va_start (p);
1827 orgfun = fun = va_arg (p, rtx);
1828 no_queue = va_arg (p, int);
1829 outmode = va_arg (p, enum machine_mode);
1830 nargs = va_arg (p, int);
1831
1832 /* Copy all the libcall-arguments out of the varargs data
1833 and into a vector ARGVEC.
1834
1835 Compute how to pass each argument. We only support a very small subset
1836 of the full argument passing conventions to limit complexity here since
1837 library functions shouldn't have many args. */
1838
1839 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1840
1841 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1842
1843 args_size.constant = 0;
1844 args_size.var = 0;
1845
1846 for (count = 0; count < nargs; count++)
1847 {
1848 rtx val = va_arg (p, rtx);
1849 enum machine_mode mode = va_arg (p, enum machine_mode);
1850
1851 /* We cannot convert the arg value to the mode the library wants here;
1852 must do it earlier where we know the signedness of the arg. */
1853 if (mode == BLKmode
1854 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1855 abort ();
1856
1857 /* On some machines, there's no way to pass a float to a library fcn.
1858 Pass it as a double instead. */
1859#ifdef LIBGCC_NEEDS_DOUBLE
1860 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1861 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1862#endif
1863
1864 /* Make sure it is a reasonable operand for a move or push insn. */
1865 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1866 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1867 val = force_operand (val, 0);
1868
1869 argvec[count].value = val;
1870 argvec[count].mode = mode;
1871
1872#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1873 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, (tree)0, 1))
1874 abort ();
1875#endif
1876
1877 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, (tree)0, 1);
1878 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1879 abort ();
1880#ifdef FUNCTION_ARG_PARTIAL_NREGS
1881 argvec[count].partial
1882 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, (tree)0, 1);
1883#else
1884 argvec[count].partial = 0;
1885#endif
1886
1887 locate_and_pad_parm (mode, 0,
1888 argvec[count].reg && argvec[count].partial == 0,
1889 0, &args_size, &argvec[count].offset,
1890 &argvec[count].size);
1891
1892 if (argvec[count].size.var)
1893 abort ();
1894
1895#ifndef REG_PARM_STACK_SPACE
1896 if (argvec[count].partial)
1897 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1898#endif
1899
1900 if (argvec[count].reg == 0 || argvec[count].partial != 0
1901#ifdef REG_PARM_STACK_SPACE
1902 || 1
1903#endif
1904 )
1905 args_size.constant += argvec[count].size.constant;
1906
1907#ifdef ACCUMULATE_OUTGOING_ARGS
1908 /* If this arg is actually passed on the stack, it might be
1909 clobbering something we already put there (this library call might
1910 be inside the evaluation of an argument to a function whose call
1911 requires the stack). This will only occur when the library call
1912 has sufficient args to run out of argument registers. Abort in
1913 this case; if this ever occurs, code must be added to save and
1914 restore the arg slot. */
1915
1916 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1917 abort ();
1918#endif
1919
1920 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1921 }
1922 va_end (p);
1923
1924 /* If this machine requires an external definition for library
1925 functions, write one out. */
1926 assemble_external_libcall (fun);
1927
1928#ifdef STACK_BOUNDARY
1929 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1930 / STACK_BYTES) * STACK_BYTES);
1931#endif
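/* Editor's note: the expression above rounds args_size.constant up to a
   multiple of STACK_BYTES; e.g. with STACK_BYTES == 8, 13 becomes 16.  */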
1932
1933#ifdef REG_PARM_STACK_SPACE
1934 args_size.constant = MAX (args_size.constant,
1935 REG_PARM_STACK_SPACE ((tree) 0));
1936#endif
1937
1938#ifdef ACCUMULATE_OUTGOING_ARGS
1939 if (args_size.constant > current_function_outgoing_args_size)
1940 current_function_outgoing_args_size = args_size.constant;
1941 args_size.constant = 0;
1942#endif
1943
1944#ifndef PUSH_ROUNDING
1945 argblock = push_block (gen_rtx (CONST_INT, VOIDmode, args_size.constant),
1946 0, 0);
1947#endif
1948
1949#ifdef PUSH_ARGS_REVERSED
1950 inc = -1;
1951 argnum = nargs - 1;
1952#else
1953 inc = 1;
1954 argnum = 0;
1955#endif
1956
1957 /* Push the args that need to be pushed. */
1958
1959 for (count = 0; count < nargs; count++, argnum += inc)
1960 {
1961 register enum machine_mode mode = argvec[argnum].mode;
1962 register rtx val = argvec[argnum].value;
1963 rtx reg = argvec[argnum].reg;
1964 int partial = argvec[argnum].partial;
1965
1966 if (! (reg != 0 && partial == 0))
1967 emit_push_insn (val, mode, 0, 0, 0, partial, reg, 0, argblock,
1968 gen_rtx (CONST_INT, VOIDmode,
1969 argvec[argnum].offset.constant));
1970 NO_DEFER_POP;
1971 }
1972
1973#ifdef PUSH_ARGS_REVERSED
1974 argnum = nargs - 1;
1975#else
1976 argnum = 0;
1977#endif
1978
1979 /* Now load any reg parms into their regs. */
1980
1981 for (count = 0; count < nargs; count++, argnum += inc)
1982 {
1983 register enum machine_mode mode = argvec[argnum].mode;
1984 register rtx val = argvec[argnum].value;
1985 rtx reg = argvec[argnum].reg;
1986 int partial = argvec[argnum].partial;
1987
1988 if (reg != 0 && partial == 0)
1989 emit_move_insn (reg, val);
1990 NO_DEFER_POP;
1991 }
1992
1993 /* For version 1.37, try deleting this entirely. */
1994 if (! no_queue)
1995 emit_queue ();
1996
1997 /* Any regs containing parms remain in use through the call. */
1998 start_sequence ();
1999 for (count = 0; count < nargs; count++)
2000 if (argvec[count].reg != 0)
2001 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2002
2003 use_insns = get_insns ();
2004 end_sequence ();
2005
2006 fun = prepare_call_address (fun, 0, &use_insns);
2007
2008 /* Don't allow popping to be deferred, since then
2009 cse'ing of library calls could delete a call and leave the pop. */
2010 NO_DEFER_POP;
2011
2012 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2013 will set inhibit_defer_pop to that value. */
2014
2015 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2016 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2017 outmode != VOIDmode ? hard_libcall_value (outmode) : 0,
2018 old_inhibit_defer_pop + 1, use_insns, no_queue);
2019
2020 /* Now restore inhibit_defer_pop to its actual original value. */
2021 OK_DEFER_POP;
2022}
2023\f
2024/* Expand an assignment that stores the value of FROM into TO.
2025 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2026 (This may contain a QUEUED rtx.)
2027 Otherwise, the returned value is not meaningful.
2028
2029 SUGGEST_REG is no longer actually used.
2030 It used to mean, copy the value through a register
2031 and return that register, if that is possible.
2032 But now we do this if WANT_VALUE.
2033
2034 If the value stored is a constant, we return the constant. */
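/* Editor's note: a hedged usage sketch with hypothetical trees LHS and
   RHS, not code from this file.  Expanding the statement `lhs = rhs'
   with the value unused is

     expand_assignment (lhs, rhs, 0, 0);

   while a nested assignment whose value is needed, as in `x = (y = z)',
   passes WANT_VALUE == 1 and uses the returned rtx.  */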
2035
2036rtx
2037expand_assignment (to, from, want_value, suggest_reg)
2038 tree to, from;
2039 int want_value;
2040 int suggest_reg;
2041{
2042 register rtx to_rtx = 0;
2043 rtx result;
2044
2045 /* Don't crash if the lhs of the assignment was erroneous. */
2046
2047 if (TREE_CODE (to) == ERROR_MARK)
2048 return expand_expr (from, 0, VOIDmode, 0);
2049
2050 /* Assignment of a structure component needs special treatment
2051 if the structure component's rtx is not simply a MEM.
2052 Assignment of an array element at a constant index
2053 has the same problem. */
2054
2055 if (TREE_CODE (to) == COMPONENT_REF
2056 || TREE_CODE (to) == BIT_FIELD_REF
2057 || (TREE_CODE (to) == ARRAY_REF
2058 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2059 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2060 {
2061 enum machine_mode mode1;
2062 int bitsize;
2063 int bitpos;
2064 tree offset;
2065 int unsignedp;
2066 int volatilep = 0;
2067 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2068 &mode1, &unsignedp, &volatilep);
2069
2070 /* If we are going to use store_bit_field and extract_bit_field,
2071 make sure to_rtx will be safe for multiple use. */
2072
2073 if (mode1 == VOIDmode && want_value)
2074 tem = stabilize_reference (tem);
2075
2076 to_rtx = expand_expr (tem, 0, VOIDmode, 0);
2077 if (offset != 0)
2078 {
2079 rtx offset_rtx = expand_expr (offset, 0, VOIDmode, 0);
2080
2081 if (GET_CODE (to_rtx) != MEM)
2082 abort ();
2083 to_rtx = change_address (to_rtx, VOIDmode,
2084 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2085 force_reg (Pmode, offset_rtx)));
2086 }
2087 if (volatilep)
2088 {
2089 if (GET_CODE (to_rtx) == MEM)
2090 MEM_VOLATILE_P (to_rtx) = 1;
2091#if 0 /* This was turned off because, when a field is volatile
2092 in an object which is not volatile, the object may be in a register,
2093 and then we would abort over here. */
2094 else
2095 abort ();
2096#endif
2097 }
2098
2099 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2100 (want_value
2101 /* Spurious cast makes HPUX compiler happy. */
2102 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2103 : VOIDmode),
2104 unsignedp,
2105 /* Required alignment of containing datum. */
2106 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2107 int_size_in_bytes (TREE_TYPE (tem)));
2108 preserve_temp_slots (result);
2109 free_temp_slots ();
2110
2111 return result;
2112 }
2113
2114 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2115 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2116
2117 if (to_rtx == 0)
2118 to_rtx = expand_expr (to, 0, VOIDmode, 0);
2119
2120 /* In case we are returning the contents of an object which overlaps
2121 the place the value is being stored, use a safe function when copying
2122 a value through a pointer into a structure value return block. */
2123 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2124 && current_function_returns_struct
2125 && !current_function_returns_pcc_struct)
2126 {
2127 rtx from_rtx = expand_expr (from, 0, VOIDmode, 0);
2128 rtx size = expr_size (from);
2129
2130#ifdef TARGET_MEM_FUNCTIONS
2131 emit_library_call (memcpy_libfunc, 1,
2132 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2133 XEXP (from_rtx, 0), Pmode,
2134 size, Pmode);
2135#else
2136 emit_library_call (bcopy_libfunc, 1,
2137 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2138 XEXP (to_rtx, 0), Pmode,
2139 size, Pmode);
2140#endif
2141
2142 preserve_temp_slots (to_rtx);
2143 free_temp_slots ();
2144 return to_rtx;
2145 }
2146
2147 /* Compute FROM and store the value in the rtx we got. */
2148
2149 result = store_expr (from, to_rtx, want_value);
2150 preserve_temp_slots (result);
2151 free_temp_slots ();
2152 return result;
2153}
2154
2155/* Generate code for computing expression EXP,
2156 and storing the value into TARGET.
2157 Returns TARGET or an equivalent value.
2158 TARGET may contain a QUEUED rtx.
2159
2160 If SUGGEST_REG is nonzero, copy the value through a register
2161 and return that register, if that is possible.
2162
2163 If the value stored is a constant, we return the constant. */
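/* Editor's note: a hedged sketch, not code from this file.  With TO_RTX
   already expanded from the lhs (as in expand_assignment above), the
   typical call is

     result = store_expr (from, to_rtx, want_value);

   SUGGEST_REG == 1 asks for the value back in a register when the
   target is a MEM, as one of the arms below arranges.  */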
2164
2165rtx
2166store_expr (exp, target, suggest_reg)
2167 register tree exp;
2168 register rtx target;
2169 int suggest_reg;
2170{
2171 register rtx temp;
2172 int dont_return_target = 0;
2173
2174 if (TREE_CODE (exp) == COMPOUND_EXPR)
2175 {
2176 /* Perform first part of compound expression, then assign from second
2177 part. */
2178 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2179 emit_queue ();
2180 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2181 }
2182 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2183 {
2184 /* For conditional expression, get safe form of the target. Then
2185 test the condition, doing the appropriate assignment on either
2186 side. This avoids the creation of unnecessary temporaries.
2187 For non-BLKmode, it is more efficient not to do this. */
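/* Editor's note, a source-level illustration assumed rather than taken
   from this file: a C assignment such as

     big = flag ? x : y;

   with struct operands reaches this arm, storing X or Y directly into
   BIG on the taken branch instead of building a temporary for the
   COND_EXPR's value.  */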
2188
2189 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2190
2191 emit_queue ();
2192 target = protect_from_queue (target, 1);
2193
2194 NO_DEFER_POP;
2195 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2196 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2197 emit_queue ();
2198 emit_jump_insn (gen_jump (lab2));
2199 emit_barrier ();
2200 emit_label (lab1);
2201 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2202 emit_queue ();
2203 emit_label (lab2);
2204 OK_DEFER_POP;
2205 return target;
2206 }
2207 else if (suggest_reg && GET_CODE (target) == MEM
2208 && GET_MODE (target) != BLKmode)
2209 /* If target is in memory and caller wants value in a register instead,
2210 arrange that. Pass TARGET as target for expand_expr so that,
2211 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2212 We know expand_expr will not use the target in that case. */
2213 {
2214 temp = expand_expr (exp, cse_not_expected ? 0 : target,
2215 GET_MODE (target), 0);
2216 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2217 temp = copy_to_reg (temp);
2218 dont_return_target = 1;
2219 }
2220 else if (queued_subexp_p (target))
2221 /* If target contains a postincrement, it is not safe
2222 to use as the returned value. It would access the wrong
2223 place by the time the queued increment gets output.
2224 So copy the value through a temporary and use that temp
2225 as the result. */
2226 {
2227 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2228 {
2229 /* Expand EXP into a new pseudo. */
2230 temp = gen_reg_rtx (GET_MODE (target));
2231 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2232 }
2233 else
2234 temp = expand_expr (exp, 0, GET_MODE (target), 0);
2235 dont_return_target = 1;
2236 }
2237 else
2238 {
2239 temp = expand_expr (exp, target, GET_MODE (target), 0);
2240 /* DO return TARGET if it's a specified hardware register.
2241 expand_return relies on this. */
2242 if (!(target && GET_CODE (target) == REG
2243 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2244 && CONSTANT_P (temp))
2245 dont_return_target = 1;
2246 }
2247
2248 /* If value was not generated in the target, store it there.
2249 Convert the value to TARGET's type first if necessary. */
2250
2251 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2252 {
2253 target = protect_from_queue (target, 1);
2254 if (GET_MODE (temp) != GET_MODE (target)
2255 && GET_MODE (temp) != VOIDmode)
2256 {
2257 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2258 if (dont_return_target)
2259 {
2260 /* In this case, we will return TEMP,
2261 so make sure it has the proper mode.
2262 But don't forget to store the value into TARGET. */
2263 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2264 emit_move_insn (target, temp);
2265 }
2266 else
2267 convert_move (target, temp, unsignedp);
2268 }
2269
2270 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2271 {
2272 /* Handle copying a string constant into an array.
2273 The string constant may be shorter than the array.
2274 So copy just the string's actual length, and clear the rest. */
2275 rtx size;
2276
2277 /* Get the size of the data type of the string,
2278 which is actually the size of the target. */
2279 size = expr_size (exp);
2280 if (GET_CODE (size) == CONST_INT
2281 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2282 emit_block_move (target, temp, size,
2283 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2284 else
2285 {
2286 /* Compute the size of the data to copy from the string. */
2287 tree copy_size
2288 = fold (build (MIN_EXPR, sizetype,
2289 size_binop (CEIL_DIV_EXPR,
2290 TYPE_SIZE (TREE_TYPE (exp)),
2291 size_int (BITS_PER_UNIT)),
2292 convert (sizetype,
2293 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2294 rtx copy_size_rtx = expand_expr (copy_size, 0, VOIDmode, 0);
2295 rtx label = 0;
2296
2297 /* Copy that much. */
2298 emit_block_move (target, temp, copy_size_rtx,
2299 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2300
2301 /* Figure out how much is left in TARGET
2302 that we have to clear. */
2303 if (GET_CODE (copy_size_rtx) == CONST_INT)
2304 {
2305 temp = plus_constant (XEXP (target, 0),
2306 TREE_STRING_LENGTH (exp));
2307 size = plus_constant (size,
2308 - TREE_STRING_LENGTH (exp));
2309 }
2310 else
2311 {
2312 enum machine_mode size_mode = Pmode;
2313
2314 temp = force_reg (Pmode, XEXP (target, 0));
2315 temp = expand_binop (size_mode, add_optab, temp,
2316 copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);
2317
2318 size = expand_binop (size_mode, sub_optab, size,
2319 copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);
2320
2321 emit_cmp_insn (size, const0_rtx, LT, 0,
2322 GET_MODE (size), 0, 0);
2323 label = gen_label_rtx ();
2324 emit_jump_insn (gen_blt (label));
2325 }
2326
2327 if (size != const0_rtx)
2328 {
2329#ifdef TARGET_MEM_FUNCTIONS
2330 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2331 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2332#else
2333 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2334 temp, Pmode, size, Pmode);
2335#endif
2336 }
2337 if (label)
2338 emit_label (label);
2339 }
2340 }
2341 else if (GET_MODE (temp) == BLKmode)
2342 emit_block_move (target, temp, expr_size (exp),
2343 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2344 else
2345 emit_move_insn (target, temp);
2346 }
2347 if (dont_return_target)
2348 return temp;
2349 return target;
2350}
2351\f
2352/* Store the value of constructor EXP into the rtx TARGET.
2353 TARGET is either a REG or a MEM. */
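/* Editor's note: an illustrative case, assuming front-end trees not
   shown here.  An initialization such as

     struct { int a, b; } v = { 1, 2 };

   arrives as a CONSTRUCTOR; the cases below first clear or clobber the
   whole of TARGET as needed, then store each element with store_field. */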
2354
2355static void
2356store_constructor (exp, target)
2357 tree exp;
2358 rtx target;
2359{
2360 tree type = TREE_TYPE (exp);
2361
2362 /* We know our target cannot conflict, since safe_from_p has been called. */
2363#if 0
2364 /* Don't try copying piece by piece into a hard register
2365 since that is vulnerable to being clobbered by EXP.
2366 Instead, construct in a pseudo register and then copy it all. */
2367 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2368 {
2369 rtx temp = gen_reg_rtx (GET_MODE (target));
2370 store_constructor (exp, temp);
2371 emit_move_insn (target, temp);
2372 return;
2373 }
2374#endif
2375
2376 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2377 {
2378 register tree elt;
2379
2380 /* Inform later passes that the whole union value is dead. */
2381 if (TREE_CODE (type) == UNION_TYPE)
2382 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2383
2384 /* If we are building a static constructor into a register,
2385 set the initial value as zero so we can fold the value into
2386 a constant. */
2387 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2388 emit_move_insn (target, const0_rtx);
2389
2390 /* If the constructor has fewer fields than the structure,
2391 clear the whole structure first. */
2392 else if (list_length (CONSTRUCTOR_ELTS (exp))
2393 != list_length (TYPE_FIELDS (type)))
2394 clear_storage (target, int_size_in_bytes (type));
2395 else
2396 /* Inform later passes that the old value is dead. */
2397 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2398
2399 /* Store each element of the constructor into
2400 the corresponding field of TARGET. */
2401
2402 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2403 {
2404 register tree field = TREE_PURPOSE (elt);
2405 register enum machine_mode mode;
2406 int bitsize;
2407 int bitpos;
2408 int unsignedp;
2409
2410 /* Just ignore missing fields.
2411 We cleared the whole structure, above,
2412 if any fields are missing. */
2413 if (field == 0)
2414 continue;
2415
2416 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2417 unsignedp = TREE_UNSIGNED (field);
2418 mode = DECL_MODE (field);
2419 if (DECL_BIT_FIELD (field))
2420 mode = VOIDmode;
2421
2422 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2423 /* ??? This case remains to be written. */
2424 abort ();
2425
2426 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2427
2428 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2429 /* The alignment of TARGET is
2430 at least what its type requires. */
2431 VOIDmode, 0,
2432 TYPE_ALIGN (type) / BITS_PER_UNIT,
2433 int_size_in_bytes (type));
2434 }
2435 }
2436 else if (TREE_CODE (type) == ARRAY_TYPE)
2437 {
2438 register tree elt;
2439 register int i;
2440 tree domain = TYPE_DOMAIN (type);
2441 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2442 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2443 tree elttype = TREE_TYPE (type);
2444
2445 /* If the constructor has fewer elements than the array,
2446 clear the whole array first. Similarly if this is a
2447 static constructor of a non-BLKmode object. */
2448
2449 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2450 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2451 clear_storage (target, maxelt - minelt + 1);
2452 else
2453 /* Inform later passes that the old value is dead. */
2454 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2455
2456 /* Store each element of the constructor into
2457 the corresponding element of TARGET, determined
2458 by counting the elements. */
2459 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2460 elt;
2461 elt = TREE_CHAIN (elt), i++)
2462 {
2463 register enum machine_mode mode;
2464 int bitsize;
2465 int bitpos;
2466 int unsignedp;
2467
2468 mode = TYPE_MODE (elttype);
2469 bitsize = GET_MODE_BITSIZE (mode);
2470 unsignedp = TREE_UNSIGNED (elttype);
2471
2472 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2473
2474 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2475 /* The alignment of TARGET is
2476 at least what its type requires. */
2477 VOIDmode, 0,
2478 TYPE_ALIGN (type) / BITS_PER_UNIT,
2479 int_size_in_bytes (type));
2480 }
2481 }
2482
2483 else
2484 abort ();
2485}
2486
2487/* Store the value of EXP (an expression tree)
2488 into a subfield of TARGET which has mode MODE and occupies
2489 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2490 If MODE is VOIDmode, it means that we are storing into a bit-field.
2491
2492 If VALUE_MODE is VOIDmode, return nothing in particular.
2493 UNSIGNEDP is not used in this case.
2494
2495 Otherwise, return an rtx for the value stored. This rtx
2496 has mode VALUE_MODE if that is convenient to do.
2497 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2498
2499 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2500 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
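/* Editor's note: a hedged example of the interface with made-up numbers.
   Storing EXP into a 5-bit field at bit offset 3 of a 4-byte structure
   whose rtx is TO_RTX, discarding the value, would be roughly

     store_field (to_rtx, 5, 3, VOIDmode, exp, VOIDmode, 0,
                  UNITS_PER_WORD, 4);

   MODE == VOIDmode selects the bit-field path below.  */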
2501
2502static rtx
2503store_field (target, bitsize, bitpos, mode, exp, value_mode,
2504 unsignedp, align, total_size)
2505 rtx target;
2506 int bitsize, bitpos;
2507 enum machine_mode mode;
2508 tree exp;
2509 enum machine_mode value_mode;
2510 int unsignedp;
2511 int align;
2512 int total_size;
2513{
2514 int width_mask = 0;
2515
2516 if (bitsize < HOST_BITS_PER_INT)
2517 width_mask = (1 << bitsize) - 1;
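/* Editor's note: e.g. BITSIZE == 5 gives width_mask == (1 << 5) - 1
   == 0x1f, used below to mask the stored value instead of refetching
   it from the bit-field.  */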
2518
2519 /* If we are storing into an unaligned field of an aligned union that is
2520 in a register, we may have the mode of TARGET being an integer mode but
2521 MODE == BLKmode. In that case, get an aligned object whose size and
2522 alignment are the same as TARGET and store TARGET into it (we can avoid
2523 the store if the field being stored is the entire width of TARGET). Then
2524 call ourselves recursively to store the field into a BLKmode version of
2525 that object. Finally, load from the object into TARGET. This is not
2526 very efficient in general, but should only be slightly more expensive
2527 than the otherwise-required unaligned accesses. Perhaps this can be
2528 cleaned up later. */
2529
2530 if (mode == BLKmode
2531 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2532 {
2533 rtx object = assign_stack_temp (GET_MODE (target),
2534 GET_MODE_SIZE (GET_MODE (target)), 0);
2535 rtx blk_object = copy_rtx (object);
2536
2537 PUT_MODE (blk_object, BLKmode);
2538
2539 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2540 emit_move_insn (object, target);
2541
2542 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2543 align, total_size);
2544
2545 emit_move_insn (target, object);
2546
2547 return target;
2548 }
2549
2550 /* If the structure is in a register or if the component
2551 is a bit field, we cannot use addressing to access it.
2552 Use bit-field techniques or SUBREG to store in it. */
2553
2554 if (mode == VOIDmode
2555 || (mode != BLKmode && ! direct_store[(int) mode])
2556 || GET_CODE (target) == REG
2557 || GET_CODE (target) == SUBREG)
2558 {
2559 rtx temp = expand_expr (exp, 0, VOIDmode, 0);
2560 /* Store the value in the bitfield. */
2561 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2562 if (value_mode != VOIDmode)
2563 {
2564 /* The caller wants an rtx for the value. */
2565 /* If possible, avoid refetching from the bitfield itself. */
2566 if (width_mask != 0
2567 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2568 return expand_and (temp,
2569 gen_rtx (CONST_INT, VOIDmode, width_mask), 0);
2570 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2571 0, value_mode, 0, align, total_size);
2572 }
2573 return const0_rtx;
2574 }
2575 else
2576 {
2577 rtx addr = XEXP (target, 0);
2578 rtx to_rtx;
2579
2580 /* If a value is wanted, it must be the lhs;
2581 so make the address stable for multiple use. */
2582
2583 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2584 && ! CONSTANT_ADDRESS_P (addr)
2585 /* A frame-pointer reference is already stable. */
2586 && ! (GET_CODE (addr) == PLUS
2587 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2588 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2589 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2590 addr = copy_to_reg (addr);
2591
2592 /* Now build a reference to just the desired component. */
2593
2594 to_rtx = change_address (target, mode,
2595 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2596 MEM_IN_STRUCT_P (to_rtx) = 1;
2597
2598 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2599 }
2600}
2601\f
2602/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2603 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2604 ARRAY_REFs at constant positions and find the ultimate containing object,
2605 which we return.
2606
2607 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2608 bit position, and *PUNSIGNEDP to the signedness of the field.
2609 If the position of the field is variable, we store a tree
2610 giving the variable offset (in units) in *POFFSET.
2611 This offset is in addition to the bit position.
2612 If the position is not variable, we store 0 in *POFFSET.
2613
2614 If any of the extraction expressions is volatile,
2615 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2616
2617 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2618 is a mode that can be used to access the field. In that case, *PBITSIZE
2619 is redundant.
2620
2621 If the field describes a variable-sized object, *PMODE is set to
2622 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2623 this case, but the address of the object can be found. */
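/* Editor's note: an illustrative call, assuming EXP is a COMPONENT_REF
   tree for something like `s.x.y'; not code from this file.

     tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode, &unsignedp, &volatilep);

   returns the decl for `s', with *PBITPOS the cumulative constant bit
   offset of `y' and *POFFSET zero, since every step is constant.  */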
2624
2625tree
2626 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2627 tree exp;
2628 int *pbitsize;
2629 int *pbitpos;
2630 tree *poffset;
2631 enum machine_mode *pmode;
2632 int *punsignedp;
2633 int *pvolatilep;
2634{
2635 tree size_tree = 0;
2636 enum machine_mode mode = VOIDmode;
2637 tree offset = 0;
2638
2639 if (TREE_CODE (exp) == COMPONENT_REF)
2640 {
2641 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2642 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2643 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2644 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2645 }
2646 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2647 {
2648 size_tree = TREE_OPERAND (exp, 1);
2649 *punsignedp = TREE_UNSIGNED (exp);
2650 }
2651 else
2652 {
2653 mode = TYPE_MODE (TREE_TYPE (exp));
2654 *pbitsize = GET_MODE_BITSIZE (mode);
2655 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2656 }
2657
2658 if (size_tree)
2659 {
2660 if (TREE_CODE (size_tree) != INTEGER_CST)
2661 mode = BLKmode, *pbitsize = -1;
2662 else
2663 *pbitsize = TREE_INT_CST_LOW (size_tree);
2664 }
2665
2666 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2667 and find the ultimate containing object. */
2668
2669 *pbitpos = 0;
2670
2671 while (1)
2672 {
2673 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2674 {
2675 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2676 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2677 : TREE_OPERAND (exp, 2));
2678
2679 if (TREE_CODE (pos) == PLUS_EXPR)
2680 {
2681 tree constant, var;
2682 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2683 {
2684 constant = TREE_OPERAND (pos, 0);
2685 var = TREE_OPERAND (pos, 1);
2686 }
2687 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2688 {
2689 constant = TREE_OPERAND (pos, 1);
2690 var = TREE_OPERAND (pos, 0);
2691 }
2692 else
2693 abort ();
2694 *pbitpos += TREE_INT_CST_LOW (constant);
2695 if (offset)
2696 offset = size_binop (PLUS_EXPR, offset,
2697 size_binop (FLOOR_DIV_EXPR, var,
2698 size_int (BITS_PER_UNIT)));
2699 else
2700 offset = size_binop (FLOOR_DIV_EXPR, var,
2701 size_int (BITS_PER_UNIT));
2702 }
2703 else if (TREE_CODE (pos) == INTEGER_CST)
2704 *pbitpos += TREE_INT_CST_LOW (pos);
2705 else
2706 {
2707 /* Assume here that the offset is a multiple of a unit.
2708 If not, there should be an explicitly added constant. */
2709 if (offset)
2710 offset = size_binop (PLUS_EXPR, offset,
2711 size_binop (FLOOR_DIV_EXPR, pos,
2712 size_int (BITS_PER_UNIT)));
2713 else
2714 offset = size_binop (FLOOR_DIV_EXPR, pos,
2715 size_int (BITS_PER_UNIT));
2716 }
2717 }
2718
2719 else if (TREE_CODE (exp) == ARRAY_REF
2720 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2721 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2722 {
2723 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2724 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2725 }
2726 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2727 && ! ((TREE_CODE (exp) == NOP_EXPR
2728 || TREE_CODE (exp) == CONVERT_EXPR)
2729 && (TYPE_MODE (TREE_TYPE (exp))
2730 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2731 break;
2732
2733 /* If any reference in the chain is volatile, the effect is volatile. */
2734 if (TREE_THIS_VOLATILE (exp))
2735 *pvolatilep = 1;
2736 exp = TREE_OPERAND (exp, 0);
2737 }
2738
2739 /* If this was a bit-field, see if there is a mode that allows direct
2740 access in case EXP is in memory. */
2741 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2742 {
2743 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2744 if (mode == BLKmode)
2745 mode = VOIDmode;
2746 }
2747
2748 *pmode = mode;
2749 *poffset = offset;
2750#if 0
2751 /* We aren't finished fixing the callers to really handle nonzero offset. */
2752 if (offset != 0)
2753 abort ();
2754#endif
2755
2756 return exp;
2757}
2758\f
2759/* Given an rtx VALUE that may contain additions and multiplications,
2760 return an equivalent value that just refers to a register or memory.
2761 This is done by generating instructions to perform the arithmetic
2762 and returning a pseudo-register containing the value. */
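/* Editor's note: a hedged example with a hypothetical VALUE such as
   (plus:SI (mult:SI (reg:SI i) (const_int 4)) (reg:SI base)).

     rtx x = force_operand (value, 0);

   emits the multiply and the add and returns a pseudo holding the sum,
   making X acceptable to a move or push insn.  */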
2763
2764rtx
2765force_operand (value, target)
2766 rtx value, target;
2767{
2768 register optab binoptab = 0;
2769 /* Use a temporary to force order of execution of calls to
2770 `force_operand'. */
2771 rtx tmp;
2772 register rtx op2;
2773 /* Use subtarget as the target for operand 0 of a binary operation. */
2774 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2775
2776 if (GET_CODE (value) == PLUS)
2777 binoptab = add_optab;
2778 else if (GET_CODE (value) == MINUS)
2779 binoptab = sub_optab;
2780 else if (GET_CODE (value) == MULT)
2781 {
2782 op2 = XEXP (value, 1);
2783 if (!CONSTANT_P (op2)
2784 && !(GET_CODE (op2) == REG && op2 != subtarget))
2785 subtarget = 0;
2786 tmp = force_operand (XEXP (value, 0), subtarget);
2787 return expand_mult (GET_MODE (value), tmp,
2788 force_operand (op2, 0),
2789 target, 0);
2790 }
2791
2792 if (binoptab)
2793 {
2794 op2 = XEXP (value, 1);
2795 if (!CONSTANT_P (op2)
2796 && !(GET_CODE (op2) == REG && op2 != subtarget))
2797 subtarget = 0;
2798 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2799 {
2800 binoptab = add_optab;
2801 op2 = negate_rtx (GET_MODE (value), op2);
2802 }
2803
2804 /* Check for an addition with OP2 a constant integer and our first
2805 operand a PLUS of a virtual register and something else. In that
2806 case, we want to emit the sum of the virtual register and the
2807 constant first and then add the other value. This allows virtual
2808 register instantiation to simply modify the constant rather than
2809 creating another one around this addition. */
2810 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2811 && GET_CODE (XEXP (value, 0)) == PLUS
2812 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2813 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2814 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2815 {
2816 rtx temp = expand_binop (GET_MODE (value), binoptab,
2817 XEXP (XEXP (value, 0), 0), op2,
2818 subtarget, 0, OPTAB_LIB_WIDEN);
2819 return expand_binop (GET_MODE (value), binoptab, temp,
2820 force_operand (XEXP (XEXP (value, 0), 1), 0),
2821 target, 0, OPTAB_LIB_WIDEN);
2822 }
2823
2824 tmp = force_operand (XEXP (value, 0), subtarget);
2825 return expand_binop (GET_MODE (value), binoptab, tmp,
2826 force_operand (op2, 0),
2827 target, 0, OPTAB_LIB_WIDEN);
2828 /* We give UNSIGNEDP = 0 to expand_binop
2829 because the only operations we are expanding here are signed ones. */
2830 }
2831 return value;
2832}
2833\f
2834/* Subroutine of expand_expr:
2835 save the non-copied parts (LIST) of an expr (LHS), and return a list
2836 which can restore these values to their previous values,
2837 should something modify their storage. */
2838
2839static tree
2840save_noncopied_parts (lhs, list)
2841 tree lhs;
2842 tree list;
2843{
2844 tree tail;
2845 tree parts = 0;
2846
2847 for (tail = list; tail; tail = TREE_CHAIN (tail))
2848 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2849 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2850 else
2851 {
2852 tree part = TREE_VALUE (tail);
2853 tree part_type = TREE_TYPE (part);
2854 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part, 0);
2855 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2856 int_size_in_bytes (part_type), 0);
2857 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2858 target = change_address (target, TYPE_MODE (part_type), 0);
2859 parts = tree_cons (to_be_saved,
2860 build (RTL_EXPR, part_type, 0, (tree) target),
2861 parts);
2862 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2863 }
2864 return parts;
2865}
2866
2867/* Subroutine of expand_expr:
2868 record the non-copied parts (LIST) of an expr (LHS), and return a list
2869 which specifies the initial values of these parts. */
2870
2871static tree
2872init_noncopied_parts (lhs, list)
2873 tree lhs;
2874 tree list;
2875{
2876 tree tail;
2877 tree parts = 0;
2878
2879 for (tail = list; tail; tail = TREE_CHAIN (tail))
2880 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2881 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2882 else
2883 {
2884 tree part = TREE_VALUE (tail);
2885 tree part_type = TREE_TYPE (part);
2886 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part, 0);
2887 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2888 }
2889 return parts;
2890}
2891
2892/* Subroutine of expand_expr: return nonzero iff there is no way that
2893 EXP can reference X, which is being modified. */
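/* Editor's note: a hedged sketch of the typical use, as in the
   CONSTRUCTOR case of expand_expr below:

     if (target == 0 || ! safe_from_p (target, exp))
       target = gen_reg_rtx (mode);

   i.e. pick storage that evaluating EXP cannot clobber.  */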
2894
2895static int
2896safe_from_p (x, exp)
2897 rtx x;
2898 tree exp;
2899{
2900 rtx exp_rtl = 0;
2901 int i, nops;
2902
2903 if (x == 0)
2904 return 1;
2905
2906 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2907 find the underlying pseudo. */
2908 if (GET_CODE (x) == SUBREG)
2909 {
2910 x = SUBREG_REG (x);
2911 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2912 return 0;
2913 }
2914
2915 /* If X is a location in the outgoing argument area, it is always safe. */
2916 if (GET_CODE (x) == MEM
2917 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2918 || (GET_CODE (XEXP (x, 0)) == PLUS
2919 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2920 return 1;
2921
2922 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2923 {
2924 case 'd':
2925 exp_rtl = DECL_RTL (exp);
2926 break;
2927
2928 case 'c':
2929 return 1;
2930
2931 case 'x':
2932 if (TREE_CODE (exp) == TREE_LIST)
2933 return ((TREE_VALUE (exp) == 0
2934 || safe_from_p (x, TREE_VALUE (exp)))
2935 && (TREE_CHAIN (exp) == 0
2936 || safe_from_p (x, TREE_CHAIN (exp))));
2937 else
2938 return 0;
2939
2940 case '1':
2941 return safe_from_p (x, TREE_OPERAND (exp, 0));
2942
2943 case '2':
2944 case '<':
2945 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2946 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2947
2948 case 'e':
2949 case 'r':
2950 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2951 the expression. If it is set, we conflict iff we are that rtx or
2952 both are in memory. Otherwise, we check all operands of the
2953 expression recursively. */
2954
2955 switch (TREE_CODE (exp))
2956 {
2957 case ADDR_EXPR:
2958 return staticp (TREE_OPERAND (exp, 0));
2959
2960 case INDIRECT_REF:
2961 if (GET_CODE (x) == MEM)
2962 return 0;
2963 break;
2964
2965 case CALL_EXPR:
2966 exp_rtl = CALL_EXPR_RTL (exp);
2967 if (exp_rtl == 0)
2968 {
2969 /* Assume that the call will clobber all hard registers and
2970 all of memory. */
2971 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2972 || GET_CODE (x) == MEM)
2973 return 0;
2974 }
2975
2976 break;
2977
2978 case RTL_EXPR:
2979 exp_rtl = RTL_EXPR_RTL (exp);
2980 if (exp_rtl == 0)
2981 /* We don't know what this can modify. */
2982 return 0;
2983
2984 break;
2985
2986 case WITH_CLEANUP_EXPR:
2987 exp_rtl = RTL_EXPR_RTL (exp);
2988 break;
2989
2990 case SAVE_EXPR:
2991 exp_rtl = SAVE_EXPR_RTL (exp);
2992 break;
2993
2994 case BIND_EXPR:
2995 /* The only operand we look at is operand 1. The rest aren't
2996 part of the expression. */
2997 return safe_from_p (x, TREE_OPERAND (exp, 1));
2998
2999 case METHOD_CALL_EXPR:
3000 /* This takes an rtx argument, but shouldn't appear here. */
3001 abort ();
3002 }
3003
3004 /* If we have an rtx, we do not need to scan our operands. */
3005 if (exp_rtl)
3006 break;
3007
3008 nops = tree_code_length[(int) TREE_CODE (exp)];
3009 for (i = 0; i < nops; i++)
3010 if (TREE_OPERAND (exp, i) != 0
3011 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3012 return 0;
3013 }
3014
3015 /* If we have an rtl, find any enclosed object. Then see if we conflict
3016 with it. */
3017 if (exp_rtl)
3018 {
3019 if (GET_CODE (exp_rtl) == SUBREG)
3020 {
3021 exp_rtl = SUBREG_REG (exp_rtl);
3022 if (GET_CODE (exp_rtl) == REG
3023 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3024 return 0;
3025 }
3026
3027 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3028 are memory and EXP is not readonly. */
3029 return ! (rtx_equal_p (x, exp_rtl)
3030 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3031 && ! TREE_READONLY (exp)));
3032 }
3033
3034 /* If we reach here, it is safe. */
3035 return 1;
3036}
3037
3038/* Subroutine of expand_expr: return nonzero iff EXP is an
3039 expression whose type is statically determinable. */
3040
3041static int
3042fixed_type_p (exp)
3043 tree exp;
3044{
3045 if (TREE_CODE (exp) == PARM_DECL
3046 || TREE_CODE (exp) == VAR_DECL
3047 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3048 || TREE_CODE (exp) == COMPONENT_REF
3049 || TREE_CODE (exp) == ARRAY_REF)
3050 return 1;
3051 return 0;
3052}
3053\f
3054/* expand_expr: generate code for computing expression EXP.
3055 An rtx for the computed value is returned. The value is never null.
3056 In the case of a void EXP, const0_rtx is returned.
3057
3058 The value may be stored in TARGET if TARGET is nonzero.
3059 TARGET is just a suggestion; callers must assume that
3060 the rtx returned may not be the same as TARGET.
3061
3062 If TARGET is CONST0_RTX, it means that the value will be ignored.
3063
3064 If TMODE is not VOIDmode, it suggests generating the
3065 result in mode TMODE. But this is done only when convenient.
3066 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3067 TMODE is just a suggestion; callers must assume that
3068 the rtx returned may not have mode TMODE.
3069
3070 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3071 with a constant address even if that address is not normally legitimate.
3072 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3073
3074 If MODIFIER is EXPAND_SUM then when EXP is an addition
3075 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3076 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3077 products as above, or REG or MEM, or constant.
3078 Ordinarily in such cases we would output mul or add instructions
3079 and then return a pseudo reg containing the sum.
3080
3081 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3082 it also marks a label as absolutely required (it can't be dead).
3083 This is used for outputting expressions used in initializers. */
3084
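/* Editor's note: hedged usage sketches with hypothetical trees.  The
   common form

     op0 = expand_expr (exp, 0, VOIDmode, 0);

   lets the value land wherever is convenient, while address arithmetic
   may use

     addr = expand_expr (exp, 0, VOIDmode, EXPAND_SUM);

   which can return a (plus ...) nest for memory_address to digest
   instead of forcing the sum into a register.  */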
3085rtx
3086expand_expr (exp, target, tmode, modifier)
3087 register tree exp;
3088 rtx target;
3089 enum machine_mode tmode;
3090 enum expand_modifier modifier;
3091{
3092 register rtx op0, op1, temp;
3093 tree type = TREE_TYPE (exp);
3094 int unsignedp = TREE_UNSIGNED (type);
3095 register enum machine_mode mode = TYPE_MODE (type);
3096 register enum tree_code code = TREE_CODE (exp);
3097 optab this_optab;
3098 /* Use subtarget as the target for operand 0 of a binary operation. */
3099 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3100 rtx original_target = target;
3101 int ignore = target == const0_rtx;
3102 tree context;
3103
3104 /* Don't use hard regs as subtargets, because the combiner
3105 can only handle pseudo regs. */
3106 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3107 subtarget = 0;
3108 /* Avoid subtargets inside loops,
3109 since they hide some invariant expressions. */
3110 if (preserve_subexpressions_p ())
3111 subtarget = 0;
3112
3113 if (ignore) target = 0, original_target = 0;
3114
3115 /* If we will do cse, generate all results into pseudo registers
3116 since 1) that allows cse to find more things
3117 and 2) otherwise cse could produce an insn the machine
3118 cannot support. */
3119
3120 if (! cse_not_expected && mode != BLKmode && target
3121 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3122 target = subtarget;
3123
3124 /* Ensure we reference a volatile object even if value is ignored. */
3125 if (ignore && TREE_THIS_VOLATILE (exp)
3126 && mode != VOIDmode && mode != BLKmode)
3127 {
3128 target = gen_reg_rtx (mode);
3129 temp = expand_expr (exp, target, VOIDmode, modifier);
3130 if (temp != target)
3131 emit_move_insn (target, temp);
3132 return target;
3133 }
3134
3135 switch (code)
3136 {
3137 case LABEL_DECL:
3138 {
3139 tree function = decl_function_context (exp);
3140 /* Handle using a label in a containing function. */
3141 if (function != current_function_decl && function != 0)
3142 {
3143 struct function *p = find_function_data (function);
3144 /* Allocate in the memory associated with the function
3145 that the label is in. */
3146 push_obstacks (p->function_obstack,
3147 p->function_maybepermanent_obstack);
3148
3149 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3150 label_rtx (exp), p->forced_labels);
3151 pop_obstacks ();
3152 }
3153 else if (modifier == EXPAND_INITIALIZER)
3154 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3155 label_rtx (exp), forced_labels);
3156 return gen_rtx (MEM, FUNCTION_MODE,
3157 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3158 }
3159
3160 case PARM_DECL:
3161 if (DECL_RTL (exp) == 0)
3162 {
3163 error_with_decl (exp, "prior parameter's size depends on `%s'");
3164 return CONST0_RTX (mode);
3165 }
3166
3167 case FUNCTION_DECL:
3168 case VAR_DECL:
3169 case RESULT_DECL:
3170 if (DECL_RTL (exp) == 0)
3171 abort ();
3172 /* Ensure variable marked as used
3173 even if it doesn't go through a parser. */
3174 TREE_USED (exp) = 1;
3175 /* Handle variables inherited from containing functions. */
3176 context = decl_function_context (exp);
3177
3178 /* We treat inline_function_decl as an alias for the current function
3179 because that is the inline function whose vars, types, etc.
3180 are being merged into the current function.
3181 See expand_inline_function. */
3182 if (context != 0 && context != current_function_decl
3183 && context != inline_function_decl
3184 /* If var is static, we don't need a static chain to access it. */
3185 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3186 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3187 {
3188 rtx addr;
3189
3190 /* Mark as non-local and addressable. */
3191 TREE_NONLOCAL (exp) = 1;
3192 mark_addressable (exp);
3193 if (GET_CODE (DECL_RTL (exp)) != MEM)
3194 abort ();
3195 addr = XEXP (DECL_RTL (exp), 0);
3196 if (GET_CODE (addr) == MEM)
3197 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3198 else
3199 addr = fix_lexical_addr (addr, exp);
3200 return change_address (DECL_RTL (exp), mode, addr);
3201 }
3202
3203 /* This is the case of an array whose size is to be determined
3204 from its initializer, while the initializer is still being parsed.
3205 See expand_decl. */
3206 if (GET_CODE (DECL_RTL (exp)) == MEM
3207 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3208 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3209 XEXP (DECL_RTL (exp), 0));
3210 if (GET_CODE (DECL_RTL (exp)) == MEM
3211 && modifier != EXPAND_CONST_ADDRESS
3212 && modifier != EXPAND_SUM
3213 && modifier != EXPAND_INITIALIZER)
3214 {
3215 /* DECL_RTL probably contains a constant address.
3216 On RISC machines where a constant address isn't valid,
3217 make some insns to get that address into a register. */
3218 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3219 || (flag_force_addr
3220 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3221 return change_address (DECL_RTL (exp), VOIDmode,
3222 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3223 }
3224 return DECL_RTL (exp);
3225
3226 case INTEGER_CST:
3227 return immed_double_const (TREE_INT_CST_LOW (exp),
3228 TREE_INT_CST_HIGH (exp),
3229 mode);
3230
3231 case CONST_DECL:
3232 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3233
3234 case REAL_CST:
3235 /* If optimized, generate immediate CONST_DOUBLE
3236 which will be turned into memory by reload if necessary.
3237
3238 We used to force a register so that loop.c could see it. But
3239 this does not allow gen_* patterns to perform optimizations with
3240 the constants. It also produces two insns in cases like "x = 1.0;".
3241 On most machines, floating-point constants are not permitted in
3242 many insns, so we'd end up copying it to a register in any case.
3243
3244 Now, we do the copying in expand_binop, if appropriate. */
3245 return immed_real_const (exp);
3246
3247 case COMPLEX_CST:
3248 case STRING_CST:
3249 if (! TREE_CST_RTL (exp))
3250 output_constant_def (exp);
3251
3252 /* TREE_CST_RTL probably contains a constant address.
3253 On RISC machines where a constant address isn't valid,
3254 make some insns to get that address into a register. */
3255 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3256 && modifier != EXPAND_CONST_ADDRESS
3257 && modifier != EXPAND_INITIALIZER
3258 && modifier != EXPAND_SUM
3259 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3260 return change_address (TREE_CST_RTL (exp), VOIDmode,
3261 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3262 return TREE_CST_RTL (exp);
3263
3264 case SAVE_EXPR:
3265 context = decl_function_context (exp);
3266 /* We treat inline_function_decl as an alias for the current function
3267 because that is the inline function whose vars, types, etc.
3268 are being merged into the current function.
3269 See expand_inline_function. */
3270 if (context == current_function_decl || context == inline_function_decl)
3271 context = 0;
3272
3273 /* If this is non-local, handle it. */
3274 if (context)
3275 {
3276 temp = SAVE_EXPR_RTL (exp);
3277 if (temp && GET_CODE (temp) == REG)
3278 {
3279 put_var_into_stack (exp);
3280 temp = SAVE_EXPR_RTL (exp);
3281 }
3282 if (temp == 0 || GET_CODE (temp) != MEM)
3283 abort ();
3284 return change_address (temp, mode,
3285 fix_lexical_addr (XEXP (temp, 0), exp));
3286 }
3287 if (SAVE_EXPR_RTL (exp) == 0)
3288 {
3289 if (mode == BLKmode)
3290 temp
3291 = assign_stack_temp (mode,
3292 int_size_in_bytes (TREE_TYPE (exp)), 0);
3293 else
3294 temp = gen_reg_rtx (mode);
3295 SAVE_EXPR_RTL (exp) = temp;
3296 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3297 if (!optimize && GET_CODE (temp) == REG)
3298 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3299 save_expr_regs);
3300 }
3301 return SAVE_EXPR_RTL (exp);
3302
3303 case EXIT_EXPR:
3304 /* Exit the current loop if the body-expression is true. */
3305 {
3306 rtx label = gen_label_rtx ();
3307 do_jump (TREE_OPERAND (exp, 0), label, 0);
3308 expand_exit_loop (0);
3309 emit_label (label);
3310 }
3311 return const0_rtx;
3312
3313 case LOOP_EXPR:
3314 expand_start_loop (1);
3315 expand_expr_stmt (TREE_OPERAND (exp, 0));
3316 expand_end_loop ();
3317
3318 return const0_rtx;
3319
3320 case BIND_EXPR:
3321 {
3322 tree vars = TREE_OPERAND (exp, 0);
3323 int vars_need_expansion = 0;
3324
3325 /* Need to open a binding contour here because
3326 if there are any cleanups they must be contained here. */
3327 expand_start_bindings (0);
3328
3329 /* Mark the corresponding BLOCK for output. */
3330 if (TREE_OPERAND (exp, 2) != 0)
3331 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3332
3333 /* If VARS have not yet been expanded, expand them now. */
3334 while (vars)
3335 {
3336 if (DECL_RTL (vars) == 0)
3337 {
3338 vars_need_expansion = 1;
3339 expand_decl (vars);
3340 }
3341 expand_decl_init (vars);
3342 vars = TREE_CHAIN (vars);
3343 }
3344
3345 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3346
3347 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3348
3349 return temp;
3350 }
3351
3352 case RTL_EXPR:
3353 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3354 abort ();
3355 emit_insns (RTL_EXPR_SEQUENCE (exp));
3356 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3357 return RTL_EXPR_RTL (exp);
3358
3359 case CONSTRUCTOR:
3360 /* All elts simple constants => refer to a constant in memory. But
3361 if this is a non-BLKmode mode, let it store a field at a time
3362 since that should make a CONST_INT or CONST_DOUBLE when we
3363 fold. */
3364 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3365 {
3366 rtx constructor = output_constant_def (exp);
3367 if (modifier != EXPAND_CONST_ADDRESS
3368 && modifier != EXPAND_INITIALIZER
3369 && modifier != EXPAND_SUM
3370 && !memory_address_p (GET_MODE (constructor),
3371 XEXP (constructor, 0)))
3372 constructor = change_address (constructor, VOIDmode,
3373 XEXP (constructor, 0));
3374 return constructor;
3375 }
3376
3377 if (ignore)
3378 {
3379 tree elt;
3380 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3381 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3382 return const0_rtx;
3383 }
3384 else
3385 {
3386 if (target == 0 || ! safe_from_p (target, exp))
3387 {
3388 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3389 target = gen_reg_rtx (mode);
3390 else
3391 {
3392 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3393 if (target)
3394 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3395 target = safe_target;
3396 }
3397 }
3398 store_constructor (exp, target);
3399 return target;
3400 }
3401
3402 case INDIRECT_REF:
3403 {
3404 tree exp1 = TREE_OPERAND (exp, 0);
3405 tree exp2;
3406
3407 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3408 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3409 This code has the same general effect as simply doing
3410 expand_expr on the save expr, except that the expression PTR
3411 is computed for use as a memory address. This means different
3412 code, suitable for indexing, may be generated. */
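/* Editor's note, a source-level illustration assumed rather than taken
   from this file: for C code such as

     *p += 1;

   the front end wraps P in a SAVE_EXPR shared by the read and the
   write; the branch below expands that pointer once, as an address, so
   both accesses can use the same indexed addressing.  */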
3413 if (TREE_CODE (exp1) == SAVE_EXPR
3414 && SAVE_EXPR_RTL (exp1) == 0
3415 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3416 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3417 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3418 {
3419 temp = expand_expr (TREE_OPERAND (exp1, 0), 0, VOIDmode, EXPAND_SUM);
3420 op0 = memory_address (mode, temp);
3421 op0 = copy_all_regs (op0);
3422 SAVE_EXPR_RTL (exp1) = op0;
3423 }
3424 else
3425 {
3426 op0 = expand_expr (exp1, 0, VOIDmode, EXPAND_SUM);
3427 op0 = memory_address (mode, op0);
3428 }
3429
3430 temp = gen_rtx (MEM, mode, op0);
3431 /* If address was computed by addition,
3432 mark this as an element of an aggregate. */
3433 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3434 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3435 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3436 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3437 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3438 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3439 || (TREE_CODE (exp1) == ADDR_EXPR
3440 && (exp2 = TREE_OPERAND (exp1, 0))
3441 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3442 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3443 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3444 MEM_IN_STRUCT_P (temp) = 1;
3445 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3446#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3447 a location is accessed through a pointer to const does not mean
3448 that the value there can never change. */
3449 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3450#endif
3451 return temp;
3452 }
3453
3454 case ARRAY_REF:
3455 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3456 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3457 {
3458 /* Nonconstant array index or nonconstant element size.
3459 Generate the tree for *(&array+index) and expand that,
3460 except do it in a language-independent way
3461 and don't complain about non-lvalue arrays.
3462 `mark_addressable' should already have been called
3463 for any array for which this case will be reached. */
3464
3465 /* Don't forget the const or volatile flag from the array element. */
3466 tree variant_type = build_type_variant (type,
3467 TREE_READONLY (exp),
3468 TREE_THIS_VOLATILE (exp));
3469 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3470 TREE_OPERAND (exp, 0));
3471 tree index = TREE_OPERAND (exp, 1);
3472 tree elt;
3473
3474 /* Convert the integer argument to a type the same size as a pointer
3475 so the multiply won't overflow spuriously. */
3476 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3477 index = convert (type_for_size (POINTER_SIZE, 0), index);
3478
3479 /* Don't think the address has side effects
3480 just because the array does.
3481 (In some cases the address might have side effects,
3482 and we fail to record that fact here. However, it should not
3483 matter, since expand_expr should not care.) */
3484 TREE_SIDE_EFFECTS (array_adr) = 0;
3485
3486 elt = build1 (INDIRECT_REF, type,
3487 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3488 array_adr,
3489 fold (build (MULT_EXPR,
3490 TYPE_POINTER_TO (variant_type),
3491 index, size_in_bytes (type))))));
3492
3493 /* Volatility, etc., of new expression is same as old expression. */
3494 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3495 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3496 TREE_READONLY (elt) = TREE_READONLY (exp);
3497
3498 return expand_expr (elt, target, tmode, modifier);
3499 }
3500
3501 /* Fold an expression like: "foo"[2].
3502 This is not done in fold so it won't happen inside &. */
3503 {
3504 int i;
3505 tree arg0 = TREE_OPERAND (exp, 0);
3506 tree arg1 = TREE_OPERAND (exp, 1);
3507
3508 if (TREE_CODE (arg0) == STRING_CST
3509 && TREE_CODE (arg1) == INTEGER_CST
3510 && !TREE_INT_CST_HIGH (arg1)
3511 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3512 {
3513 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3514 {
3515 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3516 TREE_TYPE (exp) = integer_type_node;
3517 return expand_expr (exp, target, tmode, modifier);
3518 }
3519 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3520 {
3521 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3522 TREE_TYPE (exp) = integer_type_node;
3523 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3524 }
3525 }
3526 }
3527
3528 /* If this is a constant index into a constant array,
3529 just get the value from the array. Handle both the cases when
3530 we have an explicit constructor and when our operand is a variable
3531 that was declared const. */
3532
3533 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3534 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3535 {
3536 tree index = fold (TREE_OPERAND (exp, 1));
3537 if (TREE_CODE (index) == INTEGER_CST
3538 && TREE_INT_CST_HIGH (index) == 0)
3539 {
3540 int i = TREE_INT_CST_LOW (index);
3541 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3542
3543 while (elem && i--)
3544 elem = TREE_CHAIN (elem);
3545 if (elem)
3546 return expand_expr (fold (TREE_VALUE (elem)), target,
3547 tmode, modifier);
3548 }
3549 }
3550
3551 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3552 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3553 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3554 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3555 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3556 && optimize >= 1
3557 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3558 != ERROR_MARK))
3559 {
3560 tree index = fold (TREE_OPERAND (exp, 1));
3561 if (TREE_CODE (index) == INTEGER_CST
3562 && TREE_INT_CST_HIGH (index) == 0)
3563 {
3564 int i = TREE_INT_CST_LOW (index);
3565 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3566
3567 if (TREE_CODE (init) == CONSTRUCTOR)
3568 {
3569 tree elem = CONSTRUCTOR_ELTS (init);
3570
3571 while (elem && i--)
3572 elem = TREE_CHAIN (elem);
3573 if (elem)
3574 return expand_expr (fold (TREE_VALUE (elem)), target,
3575 tmode, modifier);
3576 }
3577 else if (TREE_CODE (init) == STRING_CST
3578 && i < TREE_STRING_LENGTH (init))
3579 {
3580 temp = gen_rtx (CONST_INT, VOIDmode,
3581 TREE_STRING_POINTER (init)[i]);
3582 return convert_to_mode (mode, temp, 0);
3583 }
3584 }
3585 }
3586 /* Treat array-ref with constant index as a component-ref. */
3587
3588 case COMPONENT_REF:
3589 case BIT_FIELD_REF:
3590 /* If the operand is a CONSTRUCTOR, we can just extract the
3591 appropriate field if it is present. */
3592 if (code != ARRAY_REF
3593 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3594 {
3595 tree elt;
3596
3597 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3598 elt = TREE_CHAIN (elt))
3599 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3600 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3601 }
3602
3603 {
3604 enum machine_mode mode1;
3605 int bitsize;
3606 int bitpos;
3607 tree offset;
3608 int volatilep = 0;
3609 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3610 &mode1, &unsignedp, &volatilep);
3611
3612 /* In some cases, we will be offsetting OP0's address by a constant.
3613 So get it as a sum, if possible. If we will be using it
3614 directly in an insn, we validate it. */
3615 op0 = expand_expr (tem, 0, VOIDmode, EXPAND_SUM);
3616
3617 /* If this is a constant, put it into a register if it is a
3618 legitimate constant and memory if it isn't. */
3619 if (CONSTANT_P (op0))
3620 {
3621 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3622 if (LEGITIMATE_CONSTANT_P (op0))
3623 op0 = force_reg (mode, op0);
3624 else
3625 op0 = validize_mem (force_const_mem (mode, op0));
3626 }
3627
3628 if (offset != 0)
3629 {
3630 rtx offset_rtx = expand_expr (offset, 0, VOIDmode, 0);
3631
3632 if (GET_CODE (op0) != MEM)
3633 abort ();
3634 op0 = change_address (op0, VOIDmode,
3635 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3636 force_reg (Pmode, offset_rtx)));
3637 }
3638
3639 /* Don't forget about volatility even if this is a bitfield. */
3640 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3641 {
3642 op0 = copy_rtx (op0);
3643 MEM_VOLATILE_P (op0) = 1;
3644 }
3645
3646 if (mode1 == VOIDmode
3647 || (mode1 != BLKmode && ! direct_load[(int) mode1])
3648 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3649 {
3650 /* In cases where an aligned union has an unaligned object
3651 as a field, we might be extracting a BLKmode value from
3652 an integer-mode (e.g., SImode) object. Handle this case
3653 by doing the extract into an object as wide as the field
3654 (which we know to be the width of a basic mode), then
3655 storing into memory, and changing the mode to BLKmode. */
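 /* Hypothetical example: a union whose mode is SImode but whose only
 member is a 3-byte BLKmode structure; the member is extracted into
 an SImode register, spilled to a stack temporary, and the temporary
 is then relabeled BLKmode. */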
3656 enum machine_mode ext_mode = mode;
3657
3658 if (ext_mode == BLKmode)
3659 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3660
3661 if (ext_mode == BLKmode)
3662 abort ();
3663
3664 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3665 unsignedp, target, ext_mode, ext_mode,
3666 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3667 int_size_in_bytes (TREE_TYPE (tem)));
3668 if (mode == BLKmode)
3669 {
3670 rtx new = assign_stack_temp (ext_mode,
3671 bitsize / BITS_PER_UNIT, 0);
3672
3673 emit_move_insn (new, op0);
3674 op0 = copy_rtx (new);
3675 PUT_MODE (op0, BLKmode);
3676 }
3677
3678 return op0;
3679 }
3680
3681 /* Get a reference to just this component. */
3682 if (modifier == EXPAND_CONST_ADDRESS
3683 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3684 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3685 (bitpos / BITS_PER_UNIT)));
3686 else
3687 op0 = change_address (op0, mode1,
3688 plus_constant (XEXP (op0, 0),
3689 (bitpos / BITS_PER_UNIT)));
3690 MEM_IN_STRUCT_P (op0) = 1;
3691 MEM_VOLATILE_P (op0) |= volatilep;
3692 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3693 return op0;
3694 if (target == 0)
3695 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3696 convert_move (target, op0, unsignedp);
3697 return target;
3698 }
3699
3700 case OFFSET_REF:
3701 {
3702 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3703 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3704 op0 = expand_expr (addr, 0, VOIDmode, EXPAND_SUM);
3705 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3706 MEM_IN_STRUCT_P (temp) = 1;
3707 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3708#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3709 a location is accessed through a pointer to const does not mean
3710 that the value there can never change. */
3711 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3712#endif
3713 return temp;
3714 }
3715
3716 /* Intended for a reference to a buffer of a file-object in Pascal.
3717 But it's not certain that a special tree code will really be
3718 necessary for these. INDIRECT_REF might work for them. */
3719 case BUFFER_REF:
3720 abort ();
3721
3722 case WITH_CLEANUP_EXPR:
3723 if (RTL_EXPR_RTL (exp) == 0)
3724 {
3725 RTL_EXPR_RTL (exp)
3726 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3727 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2), cleanups_this_call);
3728 /* That's it for this cleanup. */
3729 TREE_OPERAND (exp, 2) = 0;
3730 }
3731 return RTL_EXPR_RTL (exp);
3732
3733 case CALL_EXPR:
3734 /* Check for a built-in function. */
3735 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3736 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3737 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3738 return expand_builtin (exp, target, subtarget, tmode, ignore);
3739 /* If this call was expanded already by preexpand_calls,
3740 just return the result we got. */
3741 if (CALL_EXPR_RTL (exp) != 0)
3742 return CALL_EXPR_RTL (exp);
3743 return expand_call (exp, target, ignore);
3744
3745 case NON_LVALUE_EXPR:
3746 case NOP_EXPR:
3747 case CONVERT_EXPR:
3748 case REFERENCE_EXPR:
3749 if (TREE_CODE (type) == VOID_TYPE || ignore)
3750 {
3751 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3752 return const0_rtx;
3753 }
3754 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3755 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3756 if (TREE_CODE (type) == UNION_TYPE)
3757 {
3758 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3759 if (target == 0)
3760 {
3761 if (mode == BLKmode)
3762 {
3763 if (TYPE_SIZE (type) == 0
3764 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3765 abort ();
3766 target = assign_stack_temp (BLKmode,
3767 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3768 + BITS_PER_UNIT - 1)
3769 / BITS_PER_UNIT, 0);
3770 }
3771 else
3772 target = gen_reg_rtx (mode);
3773 }
3774 if (GET_CODE (target) == MEM)
3775 /* Store data into beginning of memory target. */
3776 store_expr (TREE_OPERAND (exp, 0),
3777 change_address (target, TYPE_MODE (valtype), 0), 0);
3778 else if (GET_CODE (target) == REG)
3779 /* Store this field into a union of the proper type. */
3780 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3781 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3782 VOIDmode, 0, 1,
3783 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3784 else
3785 abort ();
3786
3787 /* Return the entire union. */
3788 return target;
3789 }
3790 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, 0);
3791 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3792 return op0;
3793 if (flag_force_mem && GET_CODE (op0) == MEM)
3794 op0 = copy_to_reg (op0);
3795
3796 if (target == 0)
3797 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3798 else
3799 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3800 return target;
3801
3802 case PLUS_EXPR:
3803 /* We come here from MINUS_EXPR when the second operand is a constant. */
3804 plus_expr:
3805 this_optab = add_optab;
3806
3807 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3808 something else, make sure we add the register to the constant and
3809 then to the other thing. This case can occur during strength
3810 reduction and doing it this way will produce better code if the
3811 frame pointer or argument pointer is eliminated.
3812
3813 fold-const.c will ensure that the constant is always in the inner
3814 PLUS_EXPR, so the only case we need to do anything about is if
3815 sp, ap, or fp is our second argument, in which case we must swap
3816 the innermost first argument and our second argument. */
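 /* Sketch of the swap: with fp denoting the frame pointer RTL_EXPR,
 (PLUS (PLUS x c) fp) becomes (PLUS (PLUS fp c) x), so the register
 and the constant are combined first and collapse to a single
 offset once the frame pointer is eliminated. */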
3817
3818 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3819 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3820 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3821 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3822 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3823 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3824 {
3825 tree t = TREE_OPERAND (exp, 1);
3826
3827 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3828 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3829 }
3830
3831 /* If the result is to be Pmode and we are adding an integer to
3832 something, we might be forming a constant. So try to use
3833 plus_constant. If it produces a sum and we can't accept it,
3834 use force_operand. This allows P = &ARR[const] to generate
3835 efficient code on machines where a SYMBOL_REF is not a valid
3836 address.
3837
3838 If this is an EXPAND_SUM call, always return the sum. */
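 /* For example, for P = &ARR[3] with 4-byte ints, the address is
 formed as (SYMBOL_REF ARR) offset by 12 via plus_constant;
 force_operand materializes the sum into a register only when the
 caller cannot accept a bare (PLUS ...) expression. */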
3839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3840 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3841 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3842 || mode == Pmode))
3843 {
3844 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3845 EXPAND_SUM);
3846 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3847 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3848 op1 = force_operand (op1, target);
3849 return op1;
3850 }
3851
3852 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3853 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3854 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3855 || mode == Pmode))
3856 {
3857 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3858 EXPAND_SUM);
3859 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3860 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3861 op0 = force_operand (op0, target);
3862 return op0;
3863 }
3864
3865 /* No sense saving up arithmetic to be done
3866 if it's all in the wrong mode to form part of an address.
3867 And force_operand won't know whether to sign-extend or
3868 zero-extend. */
3869 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3870 || mode != Pmode) goto binop;
3871
3872 preexpand_calls (exp);
3873 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3874 subtarget = 0;
3875
3876 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3877 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3878
3879 /* Make sure any term that's a sum with a constant comes last. */
3880 if (GET_CODE (op0) == PLUS
3881 && CONSTANT_P (XEXP (op0, 1)))
3882 {
3883 temp = op0;
3884 op0 = op1;
3885 op1 = temp;
3886 }
3887 /* If adding to a sum including a constant,
3888 associate it to put the constant outside. */
3889 if (GET_CODE (op1) == PLUS
3890 && CONSTANT_P (XEXP (op1, 1)))
3891 {
3892 rtx constant_term = const0_rtx;
3893
3894 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3895 if (temp != 0)
3896 op0 = temp;
3897 /* Ensure that MULT comes first if there is one. */
3898 else if (GET_CODE (op0) == MULT)
3899 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3900 else
3901 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3902
3903 /* Let's also eliminate constants from op0 if possible. */
3904 op0 = eliminate_constant_term (op0, &constant_term);
3905
3906 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3907 their sum should be a constant. Form it into OP1, since the
3908 result we want will then be OP0 + OP1. */
3909
3910 temp = simplify_binary_operation (PLUS, mode, constant_term,
3911 XEXP (op1, 1));
3912 if (temp != 0)
3913 op1 = temp;
3914 else
3915 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3916 }
3917
3918 /* Put a constant term last and put a multiplication first. */
3919 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3920 temp = op1, op1 = op0, op0 = temp;
3921
3922 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3923 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3924
3925 case MINUS_EXPR:
3926 /* Handle difference of two symbolic constants,
3927 for the sake of an initializer. */
3928 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3929 && really_constant_p (TREE_OPERAND (exp, 0))
3930 && really_constant_p (TREE_OPERAND (exp, 1)))
3931 {
3932 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, modifier);
3933 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3934 return gen_rtx (MINUS, mode, op0, op1);
3935 }
3936 /* Convert A - const to A + (-const). */
3937 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3938 {
3939 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3940 fold (build1 (NEGATE_EXPR, type,
3941 TREE_OPERAND (exp, 1))));
3942 goto plus_expr;
3943 }
3944 this_optab = sub_optab;
3945 goto binop;
3946
3947 case MULT_EXPR:
3948 preexpand_calls (exp);
3949 /* If first operand is constant, swap them.
3950 Thus the following special case checks need only
3951 check the second operand. */
3952 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3953 {
3954 register tree t1 = TREE_OPERAND (exp, 0);
3955 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3956 TREE_OPERAND (exp, 1) = t1;
3957 }
3958
3959 /* Attempt to return something suitable for generating an
3960 indexed address, for machines that support that. */
3961
3962 if (modifier == EXPAND_SUM && mode == Pmode
3963 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3964 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
3965 {
3966 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3967
3968 /* Apply distributive law if OP0 is x+c. */
3969 if (GET_CODE (op0) == PLUS
3970 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3971 return gen_rtx (PLUS, mode,
3972 gen_rtx (MULT, mode, XEXP (op0, 0),
3973 gen_rtx (CONST_INT, VOIDmode,
3974 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3975 gen_rtx (CONST_INT, VOIDmode,
3976 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3977 * INTVAL (XEXP (op0, 1)))));
3978
3979 if (GET_CODE (op0) != REG)
3980 op0 = force_operand (op0, 0);
3981 if (GET_CODE (op0) != REG)
3982 op0 = copy_to_mode_reg (mode, op0);
3983
3984 return gen_rtx (MULT, mode, op0,
3985 gen_rtx (CONST_INT, VOIDmode,
3986 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3987 }
3988
3989 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3990 subtarget = 0;
3991
3992 /* Check for multiplying things that have been extended
3993 from a narrower type. If this machine supports multiplying
3994 in that narrower type with a result in the desired type,
3995 do it that way, and avoid the explicit type-conversion. */
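 /* For example, multiplying two values just extended from short to
 int matches this pattern; where the machine provides a widening
 multiply (e.g. a mulhisi3 pattern), smul_widen_optab or
 umul_widen_optab does the HImode-by-HImode multiply with an
 SImode result directly. */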
3996 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3997 && TREE_CODE (type) == INTEGER_TYPE
3998 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3999 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4000 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4001 && int_fits_type_p (TREE_OPERAND (exp, 1),
4002 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4003 /* Don't use a widening multiply if a shift will do. */
4004 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4005 > HOST_BITS_PER_INT)
4006 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4007 ||
4008 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4009 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4010 ==
4011 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4012 /* If both operands are extended, they must either both
4013 be zero-extended or both be sign-extended. */
4014 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4015 ==
4016 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4017 {
4018 enum machine_mode innermode
4019 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4020 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4021 ? umul_widen_optab : smul_widen_optab);
4022 if (mode == GET_MODE_WIDER_MODE (innermode)
4023 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4024 {
4025 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4026 0, VOIDmode, 0);
4027 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4028 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4029 else
4030 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4031 0, VOIDmode, 0);
4032 goto binop2;
4033 }
4034 }
4035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4036 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4037 return expand_mult (mode, op0, op1, target, unsignedp);
4038
4039 case TRUNC_DIV_EXPR:
4040 case FLOOR_DIV_EXPR:
4041 case CEIL_DIV_EXPR:
4042 case ROUND_DIV_EXPR:
4043 case EXACT_DIV_EXPR:
4044 preexpand_calls (exp);
4045 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4046 subtarget = 0;
4047 /* Possible optimization: compute the dividend with EXPAND_SUM
4048 then if the divisor is constant can optimize the case
4049 where some terms of the dividend have coeffs divisible by it. */
4050 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4051 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4052 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4053
4054 case RDIV_EXPR:
4055 this_optab = flodiv_optab;
4056 goto binop;
4057
4058 case TRUNC_MOD_EXPR:
4059 case FLOOR_MOD_EXPR:
4060 case CEIL_MOD_EXPR:
4061 case ROUND_MOD_EXPR:
4062 preexpand_calls (exp);
4063 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4064 subtarget = 0;
4065 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4066 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4067 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4068
4069 case FIX_ROUND_EXPR:
4070 case FIX_FLOOR_EXPR:
4071 case FIX_CEIL_EXPR:
4072 abort (); /* Not used for C. */
4073
4074 case FIX_TRUNC_EXPR:
4075 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
4076 if (target == 0)
4077 target = gen_reg_rtx (mode);
4078 expand_fix (target, op0, unsignedp);
4079 return target;
4080
4081 case FLOAT_EXPR:
4082 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
4083 if (target == 0)
4084 target = gen_reg_rtx (mode);
4085 /* expand_float can't figure out what to do if FROM has VOIDmode.
4086 So give it the correct mode. With -O, cse will optimize this. */
4087 if (GET_MODE (op0) == VOIDmode)
4088 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4089 op0);
4090 expand_float (target, op0,
4091 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4092 return target;
4093
4094 case NEGATE_EXPR:
4095 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4096 temp = expand_unop (mode, neg_optab, op0, target, 0);
4097 if (temp == 0)
4098 abort ();
4099 return temp;
4100
4101 case ABS_EXPR:
4102 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4103
4104 /* Unsigned abs is simply the operand. Testing here means we don't
4105 risk generating incorrect code below. */
4106 if (TREE_UNSIGNED (type))
4107 return op0;
4108
4109 /* First try to do it with a special abs instruction. */
4110 temp = expand_unop (mode, abs_optab, op0, target, 0);
4111 if (temp != 0)
4112 return temp;
4113
4114 /* If this machine has expensive jumps, we can do integer absolute
4115 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4116 where W is the width of MODE. */
4117
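 /* Worked example for a 32-bit mode and x == -5: the arithmetic
 shift gives -1 (all ones); (-1 ^ -5) is 4; and 4 - (-1) is 5.
 For x >= 0 the shift gives 0 and the expression reduces to x. */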
4118 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4119 {
4120 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4121 size_int (GET_MODE_BITSIZE (mode) - 1),
4122 0, 0);
4123
4124 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4125 OPTAB_LIB_WIDEN);
4126 if (temp != 0)
4127 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4128 OPTAB_LIB_WIDEN);
4129
4130 if (temp != 0)
4131 return temp;
4132 }
4133
4134 /* If that does not win, use conditional jump and negate. */
4135 target = original_target;
4136 temp = gen_label_rtx ();
4137 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4138 || (GET_CODE (target) == REG
4139 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4140 target = gen_reg_rtx (mode);
4141 emit_move_insn (target, op0);
4142 emit_cmp_insn (target,
4143 expand_expr (convert (type, integer_zero_node),
4144 0, VOIDmode, 0),
4145 GE, 0, mode, 0, 0);
4146 NO_DEFER_POP;
4147 emit_jump_insn (gen_bge (temp));
4148 op0 = expand_unop (mode, neg_optab, target, target, 0);
4149 if (op0 != target)
4150 emit_move_insn (target, op0);
4151 emit_label (temp);
4152 OK_DEFER_POP;
4153 return target;
4154
4155 case MAX_EXPR:
4156 case MIN_EXPR:
4157 target = original_target;
4158 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4159 || (GET_CODE (target) == REG
4160 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4161 target = gen_reg_rtx (mode);
4162 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4163 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4164
4165 /* First try to do it with a special MIN or MAX instruction.
4166 If that does not win, use a conditional jump to select the proper
4167 value. */
4168 this_optab = (TREE_UNSIGNED (type)
4169 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4170 : (code == MIN_EXPR ? smin_optab : smax_optab));
4171
4172 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4173 OPTAB_WIDEN);
4174 if (temp != 0)
4175 return temp;
4176
4177 if (target != op0)
4178 emit_move_insn (target, op0);
4179 op0 = gen_label_rtx ();
4180 if (code == MAX_EXPR)
4181 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4182 ? compare_from_rtx (target, op1, GEU, 1, mode, 0, 0)
4183 : compare_from_rtx (target, op1, GE, 0, mode, 0, 0));
4184 else
4185 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4186 ? compare_from_rtx (target, op1, LEU, 1, mode, 0, 0)
4187 : compare_from_rtx (target, op1, LE, 0, mode, 0, 0));
4188 if (temp == const0_rtx)
4189 emit_move_insn (target, op1);
4190 else if (temp != const_true_rtx)
4191 {
4192 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4193 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4194 else
4195 abort ();
4196 emit_move_insn (target, op1);
4197 }
4198 emit_label (op0);
4199 return target;
4200
4201/* ??? Can optimize when the operand of this is a bitwise operation,
4202 by using a different bitwise operation. */
4203 case BIT_NOT_EXPR:
4204 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4205 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4206 if (temp == 0)
4207 abort ();
4208 return temp;
4209
4210 case FFS_EXPR:
4211 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4212 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4213 if (temp == 0)
4214 abort ();
4215 return temp;
4216
4217/* ??? Can optimize bitwise operations with one arg constant.
4218 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4219 and (a bitwise1 b) bitwise2 b (etc)
4220 but that is probably not worth while. */
4221
4222/* BIT_AND_EXPR is for bitwise anding.
4223 TRUTH_AND_EXPR is for anding two boolean values
4224 when we want in all cases to compute both of them.
4225 In general it is fastest to do TRUTH_AND_EXPR by
4226 computing both operands as actual zero-or-1 values
4227 and then bitwise anding. In cases where there cannot
4228 be any side effects, better code would be made by
4229 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4230 but the question is how to recognize those cases. */
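 /* E.g., "f () && g ()" is TRUTH_ANDIF_EXPR and must not call g ()
 when f () yields zero, whereas a TRUTH_AND_EXPR of two 0-or-1
 values may be computed as a plain bitwise AND that always
 evaluates both operands. */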
4231
4232 case TRUTH_AND_EXPR:
4233 case BIT_AND_EXPR:
4234 this_optab = and_optab;
4235 goto binop;
4236
4237/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4238 case TRUTH_OR_EXPR:
4239 case BIT_IOR_EXPR:
4240 this_optab = ior_optab;
4241 goto binop;
4242
4243 case BIT_XOR_EXPR:
4244 this_optab = xor_optab;
4245 goto binop;
4246
4247 case LSHIFT_EXPR:
4248 case RSHIFT_EXPR:
4249 case LROTATE_EXPR:
4250 case RROTATE_EXPR:
4251 preexpand_calls (exp);
4252 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4253 subtarget = 0;
4254 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4255 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4256 unsignedp);
4257
4258/* Could determine the answer when only additive constants differ.
4259 Also, the addition of one can be handled by changing the condition. */
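/* E.g., "x + 1 <= y" could be tested as "x < y" (overflow aside),
 so the addition need never be materialized. */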
4260 case LT_EXPR:
4261 case LE_EXPR:
4262 case GT_EXPR:
4263 case GE_EXPR:
4264 case EQ_EXPR:
4265 case NE_EXPR:
4266 preexpand_calls (exp);
4267 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4268 if (temp != 0)
4269 return temp;
4270 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4271 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4272 && original_target
4273 && GET_CODE (original_target) == REG
4274 && (GET_MODE (original_target)
4275 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4276 {
4277 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4278 if (temp != original_target)
4279 temp = copy_to_reg (temp);
4280 op1 = gen_label_rtx ();
4281 emit_cmp_insn (temp, const0_rtx, EQ, 0,
4282 GET_MODE (temp), unsignedp, 0);
4283 emit_jump_insn (gen_beq (op1));
4284 emit_move_insn (temp, const1_rtx);
4285 emit_label (op1);
4286 return temp;
4287 }
4288 /* If no set-flag instruction, must generate a conditional
4289 store into a temporary variable. Drop through
4290 and handle this like && and ||. */
4291
4292 case TRUTH_ANDIF_EXPR:
4293 case TRUTH_ORIF_EXPR:
4294 if (target == 0 || ! safe_from_p (target, exp)
4295 /* Make sure we don't have a hard reg (such as function's return
4296 value) live across basic blocks, if not optimizing. */
4297 || (!optimize && GET_CODE (target) == REG
4298 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4299 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4300 emit_clr_insn (target);
4301 op1 = gen_label_rtx ();
4302 jumpifnot (exp, op1);
4303 emit_0_to_1_insn (target);
4304 emit_label (op1);
4305 return target;
4306
4307 case TRUTH_NOT_EXPR:
4308 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4309 /* The parser is careful to generate TRUTH_NOT_EXPR
4310 only with operands that are always zero or one. */
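 /* Hence !x is computed as x ^ 1, which is valid when x is 0 or 1. */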
4311 temp = expand_binop (mode, xor_optab, op0,
4312 gen_rtx (CONST_INT, mode, 1),
4313 target, 1, OPTAB_LIB_WIDEN);
4314 if (temp == 0)
4315 abort ();
4316 return temp;
4317
4318 case COMPOUND_EXPR:
4319 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4320 emit_queue ();
4321 return expand_expr (TREE_OPERAND (exp, 1),
4322 (ignore ? const0_rtx : target),
4323 VOIDmode, 0);
4324
4325 case COND_EXPR:
4326 {
4327 /* Note that COND_EXPRs whose type is a structure or union
4328 are required to be constructed to contain assignments of
4329 a temporary variable, so that we can evaluate them here
4330 for side effect only. If type is void, we must do likewise. */
4331
4332 /* If an arm of the branch requires a cleanup,
4333 only that cleanup is performed. */
4334
4335 tree singleton = 0;
4336 tree binary_op = 0, unary_op = 0;
4337 tree old_cleanups = cleanups_this_call;
4338 cleanups_this_call = 0;
4339
4340 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4341 convert it to our mode, if necessary. */
4342 if (integer_onep (TREE_OPERAND (exp, 1))
4343 && integer_zerop (TREE_OPERAND (exp, 2))
4344 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4345 {
4346 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4347 if (GET_MODE (op0) == mode)
4348 return op0;
4349 if (target == 0)
4350 target = gen_reg_rtx (mode);
4351 convert_move (target, op0, unsignedp);
4352 return target;
4353 }
4354
4355 /* If we are not to produce a result, we have no target. Otherwise,
4356 if a target was specified use it; it will not be used as an
4357 intermediate target unless it is safe. If no target, use a
4358 temporary. */
4359
4360 if (mode == VOIDmode || ignore)
4361 temp = 0;
4362 else if (original_target
4363 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4364 temp = original_target;
4365 else if (mode == BLKmode)
4366 {
4367 if (TYPE_SIZE (type) == 0
4368 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4369 abort ();
4370 temp = assign_stack_temp (BLKmode,
4371 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4372 + BITS_PER_UNIT - 1)
4373 / BITS_PER_UNIT, 0);
4374 }
4375 else
4376 temp = gen_reg_rtx (mode);
4377
4378 /* Check for X ? A + B : A. If we have this, we can copy
4379 A to the output and conditionally add B. Similarly for unary
4380 operations. Don't do this if X has side-effects because
4381 those side effects might affect A or B and the "?" operation is
4382 a sequence point in ANSI. (We test for side effects later.) */
4383
4384 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4385 && operand_equal_p (TREE_OPERAND (exp, 2),
4386 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4387 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4388 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4389 && operand_equal_p (TREE_OPERAND (exp, 1),
4390 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4391 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4392 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4393 && operand_equal_p (TREE_OPERAND (exp, 2),
4394 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4395 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4396 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4397 && operand_equal_p (TREE_OPERAND (exp, 1),
4398 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4399 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4400
4401 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4402 operation, do this as A + (X != 0). Similarly for other simple
4403 binary operators. */
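 /* E.g., "x < y ? a + 1 : a" becomes a + (x < y) when the comparison
 can be computed with a store-flag instruction, avoiding a branch
 entirely. */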
4404 if (singleton && binary_op
4405 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4406 && (TREE_CODE (binary_op) == PLUS_EXPR
4407 || TREE_CODE (binary_op) == MINUS_EXPR
4408 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4409 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4410 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4411 && integer_onep (TREE_OPERAND (binary_op, 1))
4412 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4413 {
4414 rtx result;
4415 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4416 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4417 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4418 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4419 : and_optab);
4420
4421 /* If we had X ? A : A + 1, do this as A + (X == 0).
4422
4423 We have to invert the truth value here and then put it
4424 back later if do_store_flag fails. We cannot simply copy
4425 TREE_OPERAND (exp, 0) to another variable and modify that
4426 because invert_truthvalue can modify the tree pointed to
4427 by its argument. */
4428 if (singleton == TREE_OPERAND (exp, 1))
4429 TREE_OPERAND (exp, 0)
4430 = invert_truthvalue (TREE_OPERAND (exp, 0));
4431
4432 result = do_store_flag (TREE_OPERAND (exp, 0),
4433 safe_from_p (temp, singleton) ? temp : 0,
4434 mode, BRANCH_COST <= 1);
4435
4436 if (result)
4437 {
4438 op1 = expand_expr (singleton, 0, VOIDmode, 0);
4439 return expand_binop (mode, boptab, op1, result, temp,
4440 unsignedp, OPTAB_LIB_WIDEN);
4441 }
4442 else if (singleton == TREE_OPERAND (exp, 1))
4443 TREE_OPERAND (exp, 0)
4444 = invert_truthvalue (TREE_OPERAND (exp, 0));
4445 }
4446
4447 NO_DEFER_POP;
4448 op0 = gen_label_rtx ();
4449
4450 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4451 {
4452 if (temp != 0)
4453 {
4454 /* If the target conflicts with the other operand of the
4455 binary op, we can't use it. Also, we can't use the target
4456 if it is a hard register, because evaluating the condition
4457 might clobber it. */
4458 if ((binary_op
4459 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4460 || (GET_CODE (temp) == REG
4461 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4462 temp = gen_reg_rtx (mode);
4463 store_expr (singleton, temp, 0);
4464 }
4465 else
4466 expand_expr (singleton, ignore ? const1_rtx : 0, VOIDmode, 0);
4467 if (cleanups_this_call)
4468 {
4469 sorry ("aggregate value in COND_EXPR");
4470 cleanups_this_call = 0;
4471 }
4472 if (singleton == TREE_OPERAND (exp, 1))
4473 jumpif (TREE_OPERAND (exp, 0), op0);
4474 else
4475 jumpifnot (TREE_OPERAND (exp, 0), op0);
4476
4477 if (binary_op && temp == 0)
4478 /* Just touch the other operand. */
4479 expand_expr (TREE_OPERAND (binary_op, 1),
4480 ignore ? const0_rtx : 0, VOIDmode, 0);
4481 else if (binary_op)
4482 store_expr (build (TREE_CODE (binary_op), type,
4483 make_tree (type, temp),
4484 TREE_OPERAND (binary_op, 1)),
4485 temp, 0);
4486 else
4487 store_expr (build1 (TREE_CODE (unary_op), type,
4488 make_tree (type, temp)),
4489 temp, 0);
4490 op1 = op0;
4491 }
4492#if 0
4493 /* This is now done in jump.c and is better done there because it
4494 produces shorter register lifetimes. */
4495
4496 /* Check for both possibilities either constants or variables
4497 in registers (but not the same as the target!). If so, can
4498 save branches by assigning one, branching, and assigning the
4499 other. */
4500 else if (temp && GET_MODE (temp) != BLKmode
4501 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4502 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4503 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4504 && DECL_RTL (TREE_OPERAND (exp, 1))
4505 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4506 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4507 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4508 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4509 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4510 && DECL_RTL (TREE_OPERAND (exp, 2))
4511 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4512 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4513 {
4514 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4515 temp = gen_reg_rtx (mode);
4516 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4517 jumpifnot (TREE_OPERAND (exp, 0), op0);
4518 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4519 op1 = op0;
4520 }
4521#endif
4522 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4523 comparison operator. If we have one of these cases, set the
4524 output to A, branch on A (cse will merge these two references),
4525 then set the output to FOO. */
4526 else if (temp
4527 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4528 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4529 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4530 TREE_OPERAND (exp, 1), 0)
4531 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4532 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4533 {
4534 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4535 temp = gen_reg_rtx (mode);
4536 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4537 jumpif (TREE_OPERAND (exp, 0), op0);
4538 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4539 op1 = op0;
4540 }
4541 else if (temp
4542 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4543 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4544 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4545 TREE_OPERAND (exp, 2), 0)
4546 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4547 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4548 {
4549 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4550 temp = gen_reg_rtx (mode);
4551 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4552 jumpifnot (TREE_OPERAND (exp, 0), op0);
4553 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4554 op1 = op0;
4555 }
4556 else
4557 {
4558 op1 = gen_label_rtx ();
4559 jumpifnot (TREE_OPERAND (exp, 0), op0);
4560 if (temp != 0)
4561 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4562 else
4563 expand_expr (TREE_OPERAND (exp, 1), ignore ? const0_rtx : 0,
4564 VOIDmode, 0);
4565 if (cleanups_this_call)
4566 {
4567 sorry ("aggregate value in COND_EXPR");
4568 cleanups_this_call = 0;
4569 }
4570
4571 emit_queue ();
4572 emit_jump_insn (gen_jump (op1));
4573 emit_barrier ();
4574 emit_label (op0);
4575 if (temp != 0)
4576 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4577 else
4578 expand_expr (TREE_OPERAND (exp, 2), ignore ? const0_rtx : 0,
4579 VOIDmode, 0);
4580 }
4581
4582 if (cleanups_this_call)
4583 {
4584 sorry ("aggregate value in COND_EXPR");
4585 cleanups_this_call = 0;
4586 }
4587
4588 emit_queue ();
4589 emit_label (op1);
4590 OK_DEFER_POP;
4591 cleanups_this_call = old_cleanups;
4592 return temp;
4593 }
4594
4595 case TARGET_EXPR:
4596 {
4597 /* Something needs to be initialized, but we didn't know
4598 where that thing was when building the tree. For example,
4599 it could be the return value of a function, or a parameter
4600 to a function which is laid down in the stack, or a temporary
4601 variable which must be passed by reference.
4602
4603 We guarantee that the expression will either be constructed
4604 or copied into our original target. */
4605
4606 tree slot = TREE_OPERAND (exp, 0);
4607
4608 if (TREE_CODE (slot) != VAR_DECL)
4609 abort ();
4610
4611 if (target == 0)
4612 {
4613 if (DECL_RTL (slot) != 0)
4614 target = DECL_RTL (slot);
4615 else
4616 {
4617 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4618 /* All temp slots at this level must not conflict. */
4619 preserve_temp_slots (target);
4620 DECL_RTL (slot) = target;
4621 }
4622
4623#if 0
4624 /* Since SLOT is not known to the called function
4625 to belong to its stack frame, we must build an explicit
4626 cleanup. This case occurs when we must build up a reference
4627 to pass the reference as an argument. In this case,
4628 it is very likely that such a reference need not be
4629 built here. */
4630
4631 if (TREE_OPERAND (exp, 2) == 0)
4632 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4633 if (TREE_OPERAND (exp, 2))
4634 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2),
4635 cleanups_this_call);
4636#endif
4637 }
4638 else
4639 {
4640 /* This case does occur, when expanding a parameter which
4641 needs to be constructed on the stack. The target
4642 is the actual stack address that we want to initialize.
4643 The function we call will perform the cleanup in this case. */
4644
4645 DECL_RTL (slot) = target;
4646 }
4647
4648 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4649 }
4650
4651 case INIT_EXPR:
4652 {
4653 tree lhs = TREE_OPERAND (exp, 0);
4654 tree rhs = TREE_OPERAND (exp, 1);
4655 tree noncopied_parts = 0;
4656 tree lhs_type = TREE_TYPE (lhs);
4657
4658 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4659 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4660 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4661 TYPE_NONCOPIED_PARTS (lhs_type));
4662 while (noncopied_parts != 0)
4663 {
4664 expand_assignment (TREE_VALUE (noncopied_parts),
4665 TREE_PURPOSE (noncopied_parts), 0, 0);
4666 noncopied_parts = TREE_CHAIN (noncopied_parts);
4667 }
4668 return temp;
4669 }
4670
4671 case MODIFY_EXPR:
4672 {
4673 /* If lhs is complex, expand calls in rhs before computing it.
4674 That's so we don't compute a pointer and save it over a call.
4675 If lhs is simple, compute it first so we can give it as a
4676 target if the rhs is just a call. This avoids an extra temp and copy
4677 and thereby prevents a partial subsumption that makes bad code.
4678 Actually we could treat component_ref's of vars like vars. */
4679
4680 tree lhs = TREE_OPERAND (exp, 0);
4681 tree rhs = TREE_OPERAND (exp, 1);
4682 tree noncopied_parts = 0;
4683 tree lhs_type = TREE_TYPE (lhs);
4684
4685 temp = 0;
4686
4687 if (TREE_CODE (lhs) != VAR_DECL
4688 && TREE_CODE (lhs) != RESULT_DECL
4689 && TREE_CODE (lhs) != PARM_DECL)
4690 preexpand_calls (exp);
4691
4692 /* Check for |= or &= of a bitfield of size one into another bitfield
4693 of size 1. In this case, (unless we need the result of the
4694 assignment) we can do this more efficiently with a
4695 test followed by an assignment, if necessary.
4696
4697 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4698 things change so we do, this code should be enhanced to
4699 support it. */
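 /* Hypothetical example: for one-bit fields "s.a |= s.b;" is done as
 "if (s.b) s.a = 1;" -- a jump on the source bit and a constant
 store, rather than a load, OR, and bitfield insertion. */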
4700 if (ignore
4701 && TREE_CODE (lhs) == COMPONENT_REF
4702 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4703 || TREE_CODE (rhs) == BIT_AND_EXPR)
4704 && TREE_OPERAND (rhs, 0) == lhs
4705 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4706 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4707 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4708 {
4709 rtx label = gen_label_rtx ();
4710
4711 do_jump (TREE_OPERAND (rhs, 1),
4712 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4713 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4714 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4715 (TREE_CODE (rhs) == BIT_IOR_EXPR
4716 ? integer_one_node
4717 : integer_zero_node)),
4718 0, 0);
4719 do_pending_stack_adjust ();
4720 emit_label (label);
4721 return const0_rtx;
4722 }
4723
4724 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4725 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4726 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4727 TYPE_NONCOPIED_PARTS (lhs_type));
4728
4729 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4730 while (noncopied_parts != 0)
4731 {
4732 expand_assignment (TREE_PURPOSE (noncopied_parts),
4733 TREE_VALUE (noncopied_parts), 0, 0);
4734 noncopied_parts = TREE_CHAIN (noncopied_parts);
4735 }
4736 return temp;
4737 }
4738
4739 case PREINCREMENT_EXPR:
4740 case PREDECREMENT_EXPR:
4741 return expand_increment (exp, 0);
4742
4743 case POSTINCREMENT_EXPR:
4744 case POSTDECREMENT_EXPR:
4745 /* Faster to treat as pre-increment if result is not used. */
4746 return expand_increment (exp, ! ignore);
4747
4748 case ADDR_EXPR:
4749 /* Are we taking the address of a nested function? */
4750 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4751 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4752 {
4753 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4754 op0 = force_operand (op0, target);
4755 }
4756 else
4757 {
4758 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode,
4759 (modifier == EXPAND_INITIALIZER
4760 ? modifier : EXPAND_CONST_ADDRESS));
4761 if (GET_CODE (op0) != MEM)
4762 abort ();
4763
4764 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4765 return XEXP (op0, 0);
4766 op0 = force_operand (XEXP (op0, 0), target);
4767 }
4768 if (flag_force_addr && GET_CODE (op0) != REG)
4769 return force_reg (Pmode, op0);
4770 return op0;
4771
4772 case ENTRY_VALUE_EXPR:
4773 abort ();
4774
4775 case ERROR_MARK:
4776 return const0_rtx;
4777
4778 default:
4779 return (*lang_expand_expr) (exp, target, tmode, modifier);
4780 }
4781
4782 /* Here to do an ordinary binary operator, generating an instruction
4783 from the optab already placed in `this_optab'. */
4784 binop:
4785 preexpand_calls (exp);
4786 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4787 subtarget = 0;
4788 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4789 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4790 binop2:
4791 temp = expand_binop (mode, this_optab, op0, op1, target,
4792 unsignedp, OPTAB_LIB_WIDEN);
4793 if (temp == 0)
4794 abort ();
4795 return temp;
4796}
4797\f
4798/* Return the alignment in bits of EXP, a pointer valued expression.
4799 But don't return more than MAX_ALIGN no matter what.
4800 The alignment returned is, by default, the alignment of the thing that
4801 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4802
4803 Otherwise, look at the expression to see if we can do better, i.e., if the
4804 expression is actually pointing at an object whose alignment is tighter. */
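 For example, for (char *) &i where i is an int variable, the
 type-based answer is only the alignment of char, but the ADDR_EXPR
 case below recovers DECL_ALIGN of i, which is tighter.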
4805
4806static int
4807get_pointer_alignment (exp, max_align)
4808 tree exp;
4809 unsigned max_align;
4810{
4811 unsigned align, inner;
4812
4813 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4814 return 0;
4815
4816 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4817 align = MIN (align, max_align);
4818
4819 while (1)
4820 {
4821 switch (TREE_CODE (exp))
4822 {
4823 case NOP_EXPR:
4824 case CONVERT_EXPR:
4825 case NON_LVALUE_EXPR:
4826 exp = TREE_OPERAND (exp, 0);
4827 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4828 return align;
4829 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4830 inner = MIN (inner, max_align);
4831 align = MAX (align, inner);
4832 break;
4833
4834 case PLUS_EXPR:
4835 /* If sum of pointer + int, restrict our maximum alignment to that
4836 imposed by the integer. If not, we can't do any better than
4837 ALIGN. */
4838 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4839 return align;
4840
4841 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4842 & (max_align - 1))
4843 != 0)
4844 max_align >>= 1;
4845
4846 exp = TREE_OPERAND (exp, 0);
4847 break;
4848
4849 case ADDR_EXPR:
4850 /* See what we are pointing at and look at its alignment. */
4851 exp = TREE_OPERAND (exp, 0);
4852 if (TREE_CODE (exp) == FUNCTION_DECL)
4853 align = MAX (align, FUNCTION_BOUNDARY);
4854 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4855 align = MAX (align, DECL_ALIGN (exp));
4856#ifdef CONSTANT_ALIGNMENT
4857 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4858 align = CONSTANT_ALIGNMENT (exp, align);
4859#endif
4860 return MIN (align, max_align);
4861
4862 default:
4863 return align;
4864 }
4865 }
4866}
4867\f
4868/* Return the tree node and offset if a given argument corresponds to
4869 a string constant. */
4870
4871static tree
4872string_constant (arg, ptr_offset)
4873 tree arg;
4874 tree *ptr_offset;
4875{
4876 STRIP_NOPS (arg);
4877
4878 if (TREE_CODE (arg) == ADDR_EXPR
4879 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4880 {
4881 *ptr_offset = integer_zero_node;
4882 return TREE_OPERAND (arg, 0);
4883 }
4884 else if (TREE_CODE (arg) == PLUS_EXPR)
4885 {
4886 tree arg0 = TREE_OPERAND (arg, 0);
4887 tree arg1 = TREE_OPERAND (arg, 1);
4888
4889 STRIP_NOPS (arg0);
4890 STRIP_NOPS (arg1);
4891
4892 if (TREE_CODE (arg0) == ADDR_EXPR
4893 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4894 {
4895 *ptr_offset = arg1;
4896 return TREE_OPERAND (arg0, 0);
4897 }
4898 else if (TREE_CODE (arg1) == ADDR_EXPR
4899 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4900 {
4901 *ptr_offset = arg0;
4902 return TREE_OPERAND (arg1, 0);
4903 }
4904 }
4905
4906 return 0;
4907}
4908
4909/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4910 way, because the string could contain a zero byte in the middle.
4911 TREE_STRING_LENGTH is the size of the character array, not the string.
4912
4913 Unfortunately, string_constant can't access the values of const char
4914 arrays with initializers, so neither can we do so here. */
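 E.g., c_strlen of "hello" is 5; of "foo\0bar" with a known offset
 of 4 it is 3; with a variable offset into a string containing an
 embedded zero byte it returns 0, meaning "unknown".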
4915
4916static tree
4917c_strlen (src)
4918 tree src;
4919{
4920 tree offset_node;
4921 int offset, max;
4922 char *ptr;
4923
4924 src = string_constant (src, &offset_node);
4925 if (src == 0)
4926 return 0;
4927 max = TREE_STRING_LENGTH (src);
4928 ptr = TREE_STRING_POINTER (src);
4929 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4930 {
4931 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4932 compute the offset to the following null if we don't know where to
4933 start searching for it. */
4934 int i;
4935 for (i = 0; i < max; i++)
4936 if (ptr[i] == 0)
4937 return 0;
4938 /* We don't know the starting offset, but we do know that the string
4939 has no internal zero bytes. We can assume that the offset falls
4940 within the bounds of the string; otherwise, the programmer deserves
4941 what he gets. Subtract the offset from the length of the string,
4942 and return that. */
4943 /* This would perhaps not be valid if we were dealing with named
4944 arrays in addition to literal string constants. */
4945 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4946 }
4947
4948 /* We have a known offset into the string. Start searching there for
4949 a null character. */
4950 if (offset_node == 0)
4951 offset = 0;
4952 else
4953 {
4954 /* Did we get a long long offset? If so, punt. */
4955 if (TREE_INT_CST_HIGH (offset_node) != 0)
4956 return 0;
4957 offset = TREE_INT_CST_LOW (offset_node);
4958 }
4959 /* If the offset is known to be out of bounds, warn, and call strlen at
4960 runtime. */
4961 if (offset < 0 || offset > max)
4962 {
4963 warning ("offset outside bounds of constant string");
4964 return 0;
4965 }
4966 /* Use strlen to search for the first zero byte. Since any strings
4967 constructed with build_string will have nulls appended, we win even
4968 if we get handed something like (char[4])"abcd".
4969
4970 Since OFFSET is our starting index into the string, no further
4971 calculation is needed. */
4972 return size_int (strlen (ptr + offset));
4973}
4974\f
4975/* Expand an expression EXP that calls a built-in function,
4976 with result going to TARGET if that's convenient
4977 (and in mode MODE if that's convenient).
4978 SUBTARGET may be used as the target for computing one of EXP's operands.
4979 IGNORE is nonzero if the value is to be ignored. */
4980
4981static rtx
4982expand_builtin (exp, target, subtarget, mode, ignore)
4983 tree exp;
4984 rtx target;
4985 rtx subtarget;
4986 enum machine_mode mode;
4987 int ignore;
4988{
4989 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4990 tree arglist = TREE_OPERAND (exp, 1);
4991 rtx op0;
4992 rtx lab1, lab2, insns;
4993 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4994
4995 switch (DECL_FUNCTION_CODE (fndecl))
4996 {
4997 case BUILT_IN_ABS:
4998 case BUILT_IN_LABS:
4999 case BUILT_IN_FABS:
5000 /* build_function_call changes these into ABS_EXPR. */
5001 abort ();
5002
5003 case BUILT_IN_FSQRT:
5004 /* If not optimizing, call the library function. */
5005 if (! optimize)
5006 break;
5007
5008 if (arglist == 0
5009 /* Arg could be wrong type if user redeclared this fcn wrong. */
5010 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5011 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5012
5013 /* Stabilize and compute the argument. */
5014 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5015 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5016 {
5017 exp = copy_node (exp);
5018 arglist = copy_node (arglist);
5019 TREE_OPERAND (exp, 1) = arglist;
5020 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5021 }
5022 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5023
5024 /* Make a suitable register to place result in. */
5025 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5026
5027 /* Test the argument to make sure it is in the proper domain for
5028 the sqrt function. If it is not in the domain, branch to a
5029 library call. */
5030 emit_queue ();
5031 start_sequence ();
5032 lab1 = gen_label_rtx ();
5033 lab2 = gen_label_rtx ();
5034
5035 /* By default check the arguments. If flag_fast_math is turned on,
5036 then assume sqrt will always be called with valid arguments.
5037 Note changing the test below from "> 0" to ">= 0" would cause
5038 incorrect results when computing sqrt(-0.0). */
5039
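 /* Note that -0.0 fails the "> 0" test below, so sqrt (-0.0), which
 IEEE requires to yield -0.0, is routed to the library call along
 with negative arguments and NaNs. */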
5040 if (! flag_fast_math)
5041 {
5042 /* By checking op0 > 0 we are able to catch all of the
5043 IEEE special cases with a single if conditional. */
5044 emit_cmp_insn (op0, CONST0_RTX (GET_MODE (op0)), GT, 0,
5045 GET_MODE (op0), 0, 0);
5046 emit_jump_insn (gen_bgt (lab1));
5047
5048 /* The argument was not in the domain; do this via library call.
5049 Pop the arguments right away in case the call gets deleted. */
5050 NO_DEFER_POP;
5051 expand_call (exp, target, 0);
5052 OK_DEFER_POP;
5053
5054 /* Branch around the open-coded version. */
5055 emit_jump_insn (gen_jump (lab2));
5056 }
5057
5058 emit_label (lab1);
5059 /* Arg is in the domain, compute sqrt, into TARGET.
5060 Set TARGET to wherever the result comes back. */
5061 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5062 sqrt_optab, op0, target, 0);
5063
5064 /* If we were unable to expand via the builtin, stop the
5065 sequence (without outputting the insns) and break, causing
5066 a call to the library function. */
5067 if (target == 0)
5068 {
5069 end_sequence ();
5070 break;
5071 }
5072 emit_label (lab2);
5073
5074
5075 /* Output the entire sequence. */
5076 insns = get_insns ();
5077 end_sequence ();
5078 emit_insns (insns);
5079
5080 return target;
5081
5082 case BUILT_IN_SAVEREGS:
5083 /* Don't do __builtin_saveregs more than once in a function.
5084 Save the result of the first call and reuse it. */
5085 if (saveregs_value != 0)
5086 return saveregs_value;
5087 {
5088 /* When this function is called, it means that registers must be
5089 saved on entry to this function. So we migrate the
5090 call to the first insn of this function. */
5091 rtx temp;
5092 rtx seq;
5093 rtx valreg, saved_valreg;
5094
5095 /* Now really call the function. `expand_call' does not call
5096 expand_builtin, so there is no danger of infinite recursion here. */
5097 start_sequence ();
5098
5099#ifdef EXPAND_BUILTIN_SAVEREGS
5100 /* Do whatever the machine needs done in this case. */
5101 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5102#else
5103 /* The register where the function returns its value
5104 is likely to have something else in it, such as an argument.
5105 So preserve that register around the call. */
5106 if (value_mode != VOIDmode)
5107 {
5108 valreg = hard_libcall_value (value_mode);
5109 saved_valreg = gen_reg_rtx (value_mode);
5110 emit_move_insn (saved_valreg, valreg);
5111 }
5112
5113 /* Generate the call, putting the value in a pseudo. */
5114 temp = expand_call (exp, target, ignore);
5115
5116 if (value_mode != VOIDmode)
5117 emit_move_insn (valreg, saved_valreg);
5118#endif
5119
5120 seq = get_insns ();
5121 end_sequence ();
5122
5123 saveregs_value = temp;
5124
5125 /* This won't work inside a SEQUENCE--it really has to be
5126 at the start of the function. */
5127 if (in_sequence_p ())
5128 {
5129 /* Better to do this than to crash. */
5130 error ("`va_start' used within `({...})'");
5131 return temp;
5132 }
5133
5134 /* Put the sequence after the NOTE that starts the function. */
5135 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5136 return temp;
5137 }
5138
5139 /* __builtin_args_info (N) returns word N of the arg space info
5140 for the current function. The number and meanings of words
5141 are controlled by the definition of CUMULATIVE_ARGS. */
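 /* E.g., __builtin_args_info (0) yields word 0 of the current
 function's CUMULATIVE_ARGS structure as a CONST_INT. */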
5142 case BUILT_IN_ARGS_INFO:
5143 {
5144 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5145 int i;
5146 int *word_ptr = (int *) &current_function_args_info;
5147 tree type, elts, result;
5148
5149 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5150 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5151 __FILE__, __LINE__);
5152
5153 if (arglist != 0)
5154 {
5155 tree arg = TREE_VALUE (arglist);
5156 if (TREE_CODE (arg) != INTEGER_CST)
5157 error ("argument of __builtin_args_info must be constant");
5158 else
5159 {
5160 int wordnum = TREE_INT_CST_LOW (arg);
5161
5162 if (wordnum < 0 || wordnum >= nwords)
5163 error ("argument of __builtin_args_info out of range");
5164 else
5165 return gen_rtx (CONST_INT, VOIDmode, word_ptr[wordnum]);
5166 }
5167 }
5168 else
5169 error ("missing argument in __builtin_args_info");
5170
5171 return const0_rtx;
5172
5173#if 0
5174 for (i = 0; i < nwords; i++)
5175 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5176
5177 type = build_array_type (integer_type_node,
5178 build_index_type (build_int_2 (nwords, 0)));
5179 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5180 TREE_CONSTANT (result) = 1;
5181 TREE_STATIC (result) = 1;
5182 result = build (INDIRECT_REF, build_pointer_type (type), result);
5183 TREE_CONSTANT (result) = 1;
5184 return expand_expr (result, 0, VOIDmode, 0);
5185#endif
5186 }
5187
5188 /* Return the address of the first anonymous stack arg. */
5189 case BUILT_IN_NEXT_ARG:
5190 {
5191 tree fntype = TREE_TYPE (current_function_decl);
5192 if (!(TYPE_ARG_TYPES (fntype) != 0
5193 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5194 != void_type_node)))
5195 {
5196 error ("`va_start' used in function with fixed args");
5197 return const0_rtx;
5198 }
5199 }
5200
5201 return expand_binop (Pmode, add_optab,
5202 current_function_internal_arg_pointer,
5203 current_function_arg_offset_rtx,
5204 0, 0, OPTAB_LIB_WIDEN);
5205
5206 case BUILT_IN_CLASSIFY_TYPE:
5207 if (arglist != 0)
5208 {
5209 tree type = TREE_TYPE (TREE_VALUE (arglist));
5210 enum tree_code code = TREE_CODE (type);
5211 if (code == VOID_TYPE)
5212 return gen_rtx (CONST_INT, VOIDmode, void_type_class);
5213 if (code == INTEGER_TYPE)
5214 return gen_rtx (CONST_INT, VOIDmode, integer_type_class);
5215 if (code == CHAR_TYPE)
5216 return gen_rtx (CONST_INT, VOIDmode, char_type_class);
5217 if (code == ENUMERAL_TYPE)
5218 return gen_rtx (CONST_INT, VOIDmode, enumeral_type_class);
5219 if (code == BOOLEAN_TYPE)
5220 return gen_rtx (CONST_INT, VOIDmode, boolean_type_class);
5221 if (code == POINTER_TYPE)
5222 return gen_rtx (CONST_INT, VOIDmode, pointer_type_class);
5223 if (code == REFERENCE_TYPE)
5224 return gen_rtx (CONST_INT, VOIDmode, reference_type_class);
5225 if (code == OFFSET_TYPE)
5226 return gen_rtx (CONST_INT, VOIDmode, offset_type_class);
5227 if (code == REAL_TYPE)
5228 return gen_rtx (CONST_INT, VOIDmode, real_type_class);
5229 if (code == COMPLEX_TYPE)
5230 return gen_rtx (CONST_INT, VOIDmode, complex_type_class);
5231 if (code == FUNCTION_TYPE)
5232 return gen_rtx (CONST_INT, VOIDmode, function_type_class);
5233 if (code == METHOD_TYPE)
5234 return gen_rtx (CONST_INT, VOIDmode, method_type_class);
5235 if (code == RECORD_TYPE)
5236 return gen_rtx (CONST_INT, VOIDmode, record_type_class);
5237 if (code == UNION_TYPE)
5238 return gen_rtx (CONST_INT, VOIDmode, union_type_class);
5239 if (code == ARRAY_TYPE)
5240 return gen_rtx (CONST_INT, VOIDmode, array_type_class);
5241 if (code == STRING_TYPE)
5242 return gen_rtx (CONST_INT, VOIDmode, string_type_class);
5243 if (code == SET_TYPE)
5244 return gen_rtx (CONST_INT, VOIDmode, set_type_class);
5245 if (code == FILE_TYPE)
5246 return gen_rtx (CONST_INT, VOIDmode, file_type_class);
5247 if (code == LANG_TYPE)
5248 return gen_rtx (CONST_INT, VOIDmode, lang_type_class);
5249 }
5250 return gen_rtx (CONST_INT, VOIDmode, no_type_class);
5251
5252 case BUILT_IN_CONSTANT_P:
5253 if (arglist == 0)
5254 return const0_rtx;
5255 else
5256 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5257 ? const1_rtx : const0_rtx);
5258
5259 case BUILT_IN_FRAME_ADDRESS:
5260 /* The argument must be a nonnegative integer constant.
5261 It counts the number of frames to scan up the stack.
5262 The value is the address of that frame. */
5263 case BUILT_IN_RETURN_ADDRESS:
5264 /* The argument must be a nonnegative integer constant.
5265 It counts the number of frames to scan up the stack.
5266 The value is the return address saved in that frame. */
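 /* Illustrative C-level uses (a sketch; nonzero frame counts assume
    the whole call chain keeps its frame pointers and dynamic chain):

	void *ra = __builtin_return_address (0);  /* where we will return */
	void *fp = __builtin_frame_address (1);   /* our caller's frame */
 */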
5267 if (arglist == 0)
5268 /* Warning about missing arg was already issued. */
5269 return const0_rtx;
5270 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5271 {
5272 error ("invalid arg to __builtin_return_address");
5273 return const0_rtx;
5274 }
5275 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5276 {
5277 error ("invalid arg to __builtin_return_address");
5278 return const0_rtx;
5279 }
5280 else
5281 {
5282 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5283 rtx tem = frame_pointer_rtx;
5284 int i;
5285
5286 /* Scan back COUNT frames to the specified frame. */
5287 for (i = 0; i < count; i++)
5288 {
5289 /* Assume the dynamic chain pointer is in the word that
5290 the frame address points to, unless otherwise specified. */
5291#ifdef DYNAMIC_CHAIN_ADDRESS
5292 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5293#endif
5294 tem = memory_address (Pmode, tem);
5295 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5296 }
5297
5298 /* For __builtin_frame_address, return what we've got. */
5299 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5300 return tem;
5301
5302 /* For __builtin_return_address,
5303 get the return address from that frame. */
5304#ifdef RETURN_ADDR_RTX
5305 return RETURN_ADDR_RTX (count, tem);
5306#else
5307 tem = memory_address (Pmode,
5308 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5309 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5310#endif
5311 }
5312
5313 case BUILT_IN_ALLOCA:
5314 if (arglist == 0
5315 /* Arg could be non-integer if user redeclared this fcn wrong. */
5316 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5317 return const0_rtx;
5318 current_function_calls_alloca = 1;
5319 /* Compute the argument. */
5320 op0 = expand_expr (TREE_VALUE (arglist), 0, VOIDmode, 0);
5321
5322 /* Allocate the desired space. */
5323 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5324
5325 /* Record the new stack level for nonlocal gotos. */
5326 if (nonlocal_goto_handler_slot != 0)
5327 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
5328 return target;
5329
5330 case BUILT_IN_FFS:
5331 /* If not optimizing, call the library function. */
5332 if (!optimize)
5333 break;
5334
5335 if (arglist == 0
5336 /* Arg could be non-integer if user redeclared this fcn wrong. */
5337 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5338 return const0_rtx;
5339
5340 /* Compute the argument. */
5341 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5342 /* Compute ffs, into TARGET if possible.
5343 Set TARGET to wherever the result comes back. */
5344 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5345 ffs_optab, op0, target, 1);
5346 if (target == 0)
5347 abort ();
5348 return target;
5349
5350 case BUILT_IN_STRLEN:
5351 /* If not optimizing, call the library function. */
5352 if (!optimize)
5353 break;
5354
5355 if (arglist == 0
5356 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5357 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5358 return const0_rtx;
5359 else
5360 {
5361 tree src = TREE_VALUE (arglist);
5362 tree len = c_strlen (src);
5363
5364 int align
5365 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5366
5367 rtx result, src_rtx, char_rtx;
5368 enum machine_mode insn_mode = value_mode, char_mode;
5369 enum insn_code icode;
5370
5371 /* If the length is known, just return it. */
5372 if (len != 0)
5373 return expand_expr (len, target, mode, 0);
5374
5375 /* If SRC is not a pointer type, don't do this operation inline. */
5376 if (align == 0)
5377 break;
5378
5379 /* Call a function if we can't compute strlen in the right mode. */
5380
5381 while (insn_mode != VOIDmode)
5382 {
5383 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5384 if (icode != CODE_FOR_nothing)
5385 break;
5386
5387 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5388 }
5389 if (insn_mode == VOIDmode)
5390 break;
5391
5392 /* Make a place to write the result of the instruction. */
5393 result = target;
5394 if (! (result != 0
5395 && GET_CODE (result) == REG
5396 && GET_MODE (result) == insn_mode
5397 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5398 result = gen_reg_rtx (insn_mode);
5399
5400 /* Make sure the operands are acceptable to the predicates. */
5401
5402 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5403 result = gen_reg_rtx (insn_mode);
5404
5405 src_rtx = memory_address (BLKmode,
5406 expand_expr (src, 0, Pmode,
5407 EXPAND_NORMAL));
5408 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5409 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5410
5411 char_rtx = const0_rtx;
5412 char_mode = insn_operand_mode[(int)icode][2];
5413 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5414 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5415
5416 emit_insn (GEN_FCN (icode) (result,
5417 gen_rtx (MEM, BLKmode, src_rtx),
5418 char_rtx,
5419 gen_rtx (CONST_INT, VOIDmode, align)));
5420
5421 /* Return the value in the proper mode for this function. */
5422 if (GET_MODE (result) == value_mode)
5423 return result;
5424 else if (target != 0)
5425 {
5426 convert_move (target, result, 0);
5427 return target;
5428 }
5429 else
5430 return convert_to_mode (value_mode, result, 0);
5431 }
5432
5433 case BUILT_IN_STRCPY:
5434 /* If not optimizing, call the library function. */
5435 if (!optimize)
5436 break;
5437
5438 if (arglist == 0
5439 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5440 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5441 || TREE_CHAIN (arglist) == 0
5442 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5443 return const0_rtx;
5444 else
5445 {
5446 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5447
5448 if (len == 0)
5449 break;
5450
5451 len = size_binop (PLUS_EXPR, len, integer_one_node);
5452
5453 chainon (arglist, build_tree_list (0, len));
5454 }
5455
5456 /* Drops in. */
5457 case BUILT_IN_MEMCPY:
5458 /* If not optimizing, call the library function. */
5459 if (!optimize)
5460 break;
5461
5462 if (arglist == 0
5463 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5464 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5465 || TREE_CHAIN (arglist) == 0
5466 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5467 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5468 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5469 return const0_rtx;
5470 else
5471 {
5472 tree dest = TREE_VALUE (arglist);
5473 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5474 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5475
5476 int src_align
5477 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5478 int dest_align
5479 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5480 rtx dest_rtx;
5481
5482 /* If either SRC or DEST is not a pointer type, don't do
5483 this operation in-line. */
5484 if (src_align == 0 || dest_align == 0)
5485 {
5486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5487 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5488 break;
5489 }
5490
5491 dest_rtx = expand_expr (dest, 0, Pmode, EXPAND_NORMAL);
5492
5493 /* Copy word part most expediently. */
5494 emit_block_move (gen_rtx (MEM, BLKmode,
5495 memory_address (BLKmode, dest_rtx)),
5496 gen_rtx (MEM, BLKmode,
5497 memory_address (BLKmode,
5498 expand_expr (src, 0, Pmode,
5499 EXPAND_NORMAL))),
5500 expand_expr (len, 0, VOIDmode, 0),
5501 MIN (src_align, dest_align));
5502 return dest_rtx;
5503 }
5504
5505/* These comparison functions need an instruction that returns an actual
5506 index. An ordinary compare that just sets the condition codes
5507 is not enough. */
5508#ifdef HAVE_cmpstrsi
5509 case BUILT_IN_STRCMP:
5510 /* If not optimizing, call the library function. */
5511 if (!optimize)
5512 break;
5513
5514 if (arglist == 0
5515 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5516 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5517 || TREE_CHAIN (arglist) == 0
5518 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5519 return const0_rtx;
5520 else if (!HAVE_cmpstrsi)
5521 break;
5522 {
5523 tree arg1 = TREE_VALUE (arglist);
5524 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5525 tree offset;
5526 tree len, len2;
5527
5528 len = c_strlen (arg1);
5529 if (len)
5530 len = size_binop (PLUS_EXPR, integer_one_node, len);
5531 len2 = c_strlen (arg2);
5532 if (len2)
5533 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5534
5535 /* If we don't have a constant length for the first, use the length
5536 of the second, if we know it. We don't require a constant for
5537 this case; some cost analysis could be done if both are available
5538 but neither is constant. For now, assume they're equally cheap.
5539
5540 If both strings have constant lengths, use the smaller. This
5541 could arise if optimization results in strcmp being called with
5542 two fixed strings, or if the code was machine-generated. We should
5543 add some code to the `memcmp' handler below to deal with such
5544 situations, someday. */
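 /* For instance, in strcmp (s, "abc") only LEN2 is known, so LEN
    becomes 4 (the string length plus the terminating null); in
    strcmp ("ab", "abcd") both are known and the smaller, 3, is used. */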
5545 if (!len || TREE_CODE (len) != INTEGER_CST)
5546 {
5547 if (len2)
5548 len = len2;
5549 else if (len == 0)
5550 break;
5551 }
5552 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5553 {
5554 if (tree_int_cst_lt (len2, len))
5555 len = len2;
5556 }
5557
5558 chainon (arglist, build_tree_list (0, len));
5559 }
5560
5561 /* Drops in. */
5562 case BUILT_IN_MEMCMP:
5563 /* If not optimizing, call the library function. */
5564 if (!optimize)
5565 break;
5566
5567 if (arglist == 0
5568 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5569 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5570 || TREE_CHAIN (arglist) == 0
5571 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5572 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5573 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5574 return const0_rtx;
5575 else if (!HAVE_cmpstrsi)
5576 break;
5577 {
5578 tree arg1 = TREE_VALUE (arglist);
5579 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5580 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5581 rtx result;
5582
5583 int arg1_align
5584 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5585 int arg2_align
5586 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5587 enum machine_mode insn_mode
5588 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5589
5590 /* If we don't have POINTER_TYPE, call the function. */
5591 if (arg1_align == 0 || arg2_align == 0)
5592 {
5593 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5594 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5595 break;
5596 }
5597
5598 /* Make a place to write the result of the instruction. */
5599 result = target;
5600 if (! (result != 0
5601 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5602 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5603 result = gen_reg_rtx (insn_mode);
5604
5605 emit_insn (gen_cmpstrsi (result,
5606 gen_rtx (MEM, BLKmode,
5607 expand_expr (arg1, 0, Pmode, EXPAND_NORMAL)),
5608 gen_rtx (MEM, BLKmode,
5609 expand_expr (arg2, 0, Pmode, EXPAND_NORMAL)),
5610 expand_expr (len, 0, VOIDmode, 0),
5611 gen_rtx (CONST_INT, VOIDmode,
5612 MIN (arg1_align, arg2_align))));
5613
5614 /* Return the value in the proper mode for this function. */
5615 mode = TYPE_MODE (TREE_TYPE (exp));
5616 if (GET_MODE (result) == mode)
5617 return result;
5618 else if (target != 0)
5619 {
5620 convert_move (target, result, 0);
5621 return target;
5622 }
5623 else
5624 return convert_to_mode (mode, result, 0);
5625 }
5626#else
5627 case BUILT_IN_STRCMP:
5628 case BUILT_IN_MEMCMP:
5629 break;
5630#endif
5631
5632 default: /* just do library call, if unknown builtin */
5633 error ("built-in function %s not currently supported",
5634 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5635 }
5636
5637 /* The switch statement above can drop through to cause the function
5638 to be called normally. */
5639
5640 return expand_call (exp, target, ignore);
5641}
5642\f
5643/* Expand code for a post- or pre- increment or decrement
5644 and return the RTX for the result.
5645 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
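/* For example, `i++' arrives as a POSTINCREMENT_EXPR with POST == 1 and
   must yield the old value of `i', while `--i' is a PREDECREMENT_EXPR
   with POST == 0 and yields the new value. */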
5646
5647static rtx
5648expand_increment (exp, post)
5649 register tree exp;
5650 int post;
5651{
5652 register rtx op0, op1;
5653 register rtx temp, value;
5654 register tree incremented = TREE_OPERAND (exp, 0);
5655 optab this_optab = add_optab;
5656 int icode;
5657 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5658 int op0_is_copy = 0;
5659
5660 /* Stabilize any component ref that might need to be
5661 evaluated more than once below. */
5662 if (TREE_CODE (incremented) == BIT_FIELD_REF
5663 || (TREE_CODE (incremented) == COMPONENT_REF
5664 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5665 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5666 incremented = stabilize_reference (incremented);
5667
5668 /* Compute the operands as RTX.
5669 Note whether OP0 is the actual lvalue or a copy of it:
5670 I believe it is a copy iff it is a register or subreg
5671 and insns were generated in computing it. */
5672 temp = get_last_insn ();
5673 op0 = expand_expr (incremented, 0, VOIDmode, 0);
5674 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5675 && temp != get_last_insn ());
5676 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5677
5678 /* Decide whether incrementing or decrementing. */
5679 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5680 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5681 this_optab = sub_optab;
5682
5683 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5684 then we cannot just increment OP0. We must
5685 therefore contrive to increment the original value.
5686 Then we can return OP0 since it is a copy of the old value. */
5687 if (op0_is_copy)
5688 {
5689 /* This is the easiest way to increment the value wherever it is.
5690 Problems with multiple evaluation of INCREMENTED
5691 are prevented because either (1) it is a component_ref,
5692 in which case it was stabilized above, or (2) it is an array_ref
5693 with constant index in an array in a register, which is
5694 safe to reevaluate. */
5695 tree newexp = build ((this_optab == add_optab
5696 ? PLUS_EXPR : MINUS_EXPR),
5697 TREE_TYPE (exp),
5698 incremented,
5699 TREE_OPERAND (exp, 1));
5700 temp = expand_assignment (incremented, newexp, ! post, 0);
5701 return post ? op0 : temp;
5702 }
5703
5704 /* Convert decrement by a constant into a negative increment. */
5705 if (this_optab == sub_optab
5706 && GET_CODE (op1) == CONST_INT)
5707 {
5708 op1 = gen_rtx (CONST_INT, VOIDmode, - INTVAL (op1));
5709 this_optab = add_optab;
5710 }
5711
5712 if (post)
5713 {
5714 /* We have a true reference to the value in OP0.
5715 If there is an insn to add or subtract in this mode, queue it. */
5716
5717#if 0 /* Turned off to avoid making extra insn for indexed memref. */
5718 op0 = stabilize (op0);
5719#endif
5720
5721 icode = (int) this_optab->handlers[(int) mode].insn_code;
5722 if (icode != (int) CODE_FOR_nothing
5723 /* Make sure that OP0 is valid for operands 0 and 1
5724 of the insn we want to queue. */
5725 && (*insn_operand_predicate[icode][0]) (op0, mode)
5726 && (*insn_operand_predicate[icode][1]) (op0, mode))
5727 {
5728 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5729 op1 = force_reg (mode, op1);
5730
5731 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5732 }
5733 }
5734
5735 /* Preincrement, or we can't increment with one simple insn. */
5736 if (post)
5737 /* Save a copy of the value before inc or dec, to return it later. */
5738 temp = value = copy_to_reg (op0);
5739 else
5740 /* Arrange to return the incremented value. */
5741 /* Copy the rtx because expand_binop will protect from the queue,
5742 and the results of that would be invalid for us to return
5743 if our caller does emit_queue before using our result. */
5744 temp = copy_rtx (value = op0);
5745
5746 /* Increment however we can. */
5747 op1 = expand_binop (mode, this_optab, value, op1, op0,
5748 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5749 /* Make sure the value is stored into OP0. */
5750 if (op1 != op0)
5751 emit_move_insn (op0, op1);
5752
5753 return temp;
5754}
5755\f
5756/* Expand all function calls contained within EXP, innermost ones first.
5757 But don't look within expressions that have sequence points.
5758 For each CALL_EXPR, record the rtx for its value
5759 in the CALL_EXPR_RTL field. */
5760
5761static void
5762preexpand_calls (exp)
5763 tree exp;
5764{
5765 register int nops, i;
5766 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5767
5768 if (! do_preexpand_calls)
5769 return;
5770
5771 /* Only expressions and references can contain calls. */
5772
5773 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5774 return;
5775
5776 switch (TREE_CODE (exp))
5777 {
5778 case CALL_EXPR:
5779 /* Do nothing if already expanded. */
5780 if (CALL_EXPR_RTL (exp) != 0)
5781 return;
5782
5783 /* Do nothing to built-in functions. */
5784 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5785 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5786 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5787 CALL_EXPR_RTL (exp) = expand_call (exp, 0, 0);
5788 return;
5789
5790 case COMPOUND_EXPR:
5791 case COND_EXPR:
5792 case TRUTH_ANDIF_EXPR:
5793 case TRUTH_ORIF_EXPR:
5794 /* If we find one of these, then we can be sure
5795 the adjust will be done for it (since it makes jumps).
5796 Do it now, so that if this is inside an argument
5797 of a function, we don't get the stack adjustment
5798 after some other args have already been pushed. */
5799 do_pending_stack_adjust ();
5800 return;
5801
5802 case BLOCK:
5803 case RTL_EXPR:
5804 case WITH_CLEANUP_EXPR:
5805 return;
5806
5807 case SAVE_EXPR:
5808 if (SAVE_EXPR_RTL (exp) != 0)
5809 return;
5810 }
5811
5812 nops = tree_code_length[(int) TREE_CODE (exp)];
5813 for (i = 0; i < nops; i++)
5814 if (TREE_OPERAND (exp, i) != 0)
5815 {
5816 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5817 if (type == 'e' || type == '<' || type == '1' || type == '2'
5818 || type == 'r')
5819 preexpand_calls (TREE_OPERAND (exp, i));
5820 }
5821}
5822\f
5823/* At the start of a function, record that we have no previously-pushed
5824 arguments waiting to be popped. */
5825
5826void
5827init_pending_stack_adjust ()
5828{
5829 pending_stack_adjust = 0;
5830}
5831
5832/* When exiting from function, if safe, clear out any pending stack adjust
5833 so the adjustment won't get done. */
5834
5835void
5836clear_pending_stack_adjust ()
5837{
5838#ifdef EXIT_IGNORE_STACK
5839 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5840 && ! (TREE_INLINE (current_function_decl) && ! flag_no_inline)
5841 && ! flag_inline_functions)
5842 pending_stack_adjust = 0;
5843#endif
5844}
5845
5846/* Pop any previously-pushed arguments that have not been popped yet. */
5847
5848void
5849do_pending_stack_adjust ()
5850{
5851 if (inhibit_defer_pop == 0)
5852 {
5853 if (pending_stack_adjust != 0)
5854 adjust_stack (gen_rtx (CONST_INT, VOIDmode, pending_stack_adjust));
5855 pending_stack_adjust = 0;
5856 }
5857}
5858
5859/* Expand all cleanups up to OLD_CLEANUPS.
5860 Needed here, and also for language-dependent calls. */
5861
5862void
5863expand_cleanups_to (old_cleanups)
5864 tree old_cleanups;
5865{
5866 while (cleanups_this_call != old_cleanups)
5867 {
5868 expand_expr (TREE_VALUE (cleanups_this_call), 0, VOIDmode, 0);
5869 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5870 }
5871}
5872\f
5873/* Expand conditional expressions. */
5874
5875/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5876 LABEL is an rtx of code CODE_LABEL, in this function and all the
5877 functions here. */
5878
5879void
5880jumpifnot (exp, label)
5881 tree exp;
5882 rtx label;
5883{
5884 do_jump (exp, label, 0);
5885}
5886
5887/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5888
5889void
5890jumpif (exp, label)
5891 tree exp;
5892 rtx label;
5893{
5894 do_jump (exp, 0, label);
5895}
5896
5897/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5898 the result is zero, or IF_TRUE_LABEL if the result is one.
5899 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5900 meaning fall through in that case.
5901
5902 do_jump always does any pending stack adjust except when it does not
5903 actually perform a jump. An example where there is no jump
5904 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5905
5906 This function is responsible for optimizing cases such as
5907 &&, || and comparison operators in EXP. */
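/* For example, `jumpifnot (a && b, label)' never materializes a 0/1
   value for the &&: the TRUTH_ANDIF_EXPR case below jumps straight to
   LABEL if A is zero and only otherwise goes on to test B. */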
5908
5909void
5910do_jump (exp, if_false_label, if_true_label)
5911 tree exp;
5912 rtx if_false_label, if_true_label;
5913{
5914 register enum tree_code code = TREE_CODE (exp);
5915 /* Some cases need to create a label to jump to
5916 in order to properly fall through.
5917 These cases set DROP_THROUGH_LABEL nonzero. */
5918 rtx drop_through_label = 0;
5919 rtx temp;
5920 rtx comparison = 0;
5921 int i;
5922 tree type;
5923
5924 emit_queue ();
5925
5926 switch (code)
5927 {
5928 case ERROR_MARK:
5929 break;
5930
5931 case INTEGER_CST:
5932 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5933 if (temp)
5934 emit_jump (temp);
5935 break;
5936
5937#if 0
5938 /* This is not true with #pragma weak */
5939 case ADDR_EXPR:
5940 /* The address of something can never be zero. */
5941 if (if_true_label)
5942 emit_jump (if_true_label);
5943 break;
5944#endif
5945
5946 case NOP_EXPR:
5947 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5948 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5949 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5950 goto normal;
5951 case CONVERT_EXPR:
5952 /* If we are narrowing the operand, we have to do the compare in the
5953 narrower mode. */
5954 if ((TYPE_PRECISION (TREE_TYPE (exp))
5955 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5956 goto normal;
5957 case NON_LVALUE_EXPR:
5958 case REFERENCE_EXPR:
5959 case ABS_EXPR:
5960 case NEGATE_EXPR:
5961 case LROTATE_EXPR:
5962 case RROTATE_EXPR:
5963 /* These cannot change zero->non-zero or vice versa. */
5964 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5965 break;
5966
5967#if 0
5968 /* This is never less insns than evaluating the PLUS_EXPR followed by
5969 a test and can be longer if the test is eliminated. */
5970 case PLUS_EXPR:
5971 /* Reduce to minus. */
5972 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5973 TREE_OPERAND (exp, 0),
5974 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5975 TREE_OPERAND (exp, 1))));
5976 /* Process as MINUS. */
5977#endif
5978
5979 case MINUS_EXPR:
5980 /* Non-zero iff operands of minus differ. */
5981 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5982 TREE_OPERAND (exp, 0),
5983 TREE_OPERAND (exp, 1)),
5984 NE, NE);
5985 break;
5986
5987 case BIT_AND_EXPR:
5988 /* If we are AND'ing with a small constant, do this comparison in the
5989 smallest type that fits. If the machine doesn't have comparisons
5990 that small, it will be converted back to the wider comparison.
5991 This helps if we are testing the sign bit of a narrower object.
5992 combine can't do this for us because it can't know whether a
5993 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
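 /* E.g., `flags & 0x80' fits in 8 bits, so it can be tested with a
    QImode comparison when the machine has one, instead of widening
    FLAGS to a full-word compare. */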
5994
5995 if (! SLOW_BYTE_ACCESS
5996 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5997 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_INT
5998 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
5999 && (type = type_for_size (i + 1, 1)) != 0
6000 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6001 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6002 != CODE_FOR_nothing))
6003 {
6004 do_jump (convert (type, exp), if_false_label, if_true_label);
6005 break;
6006 }
6007 goto normal;
6008
6009 case TRUTH_NOT_EXPR:
6010 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6011 break;
6012
6013 case TRUTH_ANDIF_EXPR:
6014 if (if_false_label == 0)
6015 if_false_label = drop_through_label = gen_label_rtx ();
6016 do_jump (TREE_OPERAND (exp, 0), if_false_label, 0);
6017 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6018 break;
6019
6020 case TRUTH_ORIF_EXPR:
6021 if (if_true_label == 0)
6022 if_true_label = drop_through_label = gen_label_rtx ();
6023 do_jump (TREE_OPERAND (exp, 0), 0, if_true_label);
6024 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6025 break;
6026
6027 case COMPOUND_EXPR:
6028 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6029 free_temp_slots ();
6030 emit_queue ();
6031 do_pending_stack_adjust ();
6032 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6033 break;
6034
6035 case COMPONENT_REF:
6036 case BIT_FIELD_REF:
6037 case ARRAY_REF:
6038 {
6039 int bitsize, bitpos, unsignedp;
6040 enum machine_mode mode;
6041 tree type;
6042 tree offset;
6043 int volatilep = 0;
6044
6045 /* Get description of this reference. We don't actually care
6046 about the underlying object here. */
6047 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6048 &mode, &unsignedp, &volatilep);
6049
6050 type = type_for_size (bitsize, unsignedp);
6051 if (! SLOW_BYTE_ACCESS
6052 && type != 0 && bitsize >= 0
6053 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6054 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6055 != CODE_FOR_nothing))
6056 {
6057 do_jump (convert (type, exp), if_false_label, if_true_label);
6058 break;
6059 }
6060 goto normal;
6061 }
6062
6063 case COND_EXPR:
6064 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6065 if (integer_onep (TREE_OPERAND (exp, 1))
6066 && integer_zerop (TREE_OPERAND (exp, 2)))
6067 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6068
6069 else if (integer_zerop (TREE_OPERAND (exp, 1))
6070 && integer_onep (TREE_OPERAND (exp, 2)))
6071 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6072
6073 else
6074 {
6075 register rtx label1 = gen_label_rtx ();
6076 drop_through_label = gen_label_rtx ();
6077 do_jump (TREE_OPERAND (exp, 0), label1, 0);
6078 /* Now the THEN-expression. */
6079 do_jump (TREE_OPERAND (exp, 1),
6080 if_false_label ? if_false_label : drop_through_label,
6081 if_true_label ? if_true_label : drop_through_label);
6082 /* In case the do_jump just above never jumps. */
6083 do_pending_stack_adjust ();
6084 emit_label (label1);
6085 /* Now the ELSE-expression. */
6086 do_jump (TREE_OPERAND (exp, 2),
6087 if_false_label ? if_false_label : drop_through_label,
6088 if_true_label ? if_true_label : drop_through_label);
6089 }
6090 break;
6091
6092 case EQ_EXPR:
6093 if (integer_zerop (TREE_OPERAND (exp, 1)))
6094 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6095 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6096 == MODE_INT)
6097 &&
6098 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6099 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6100 else
6101 comparison = compare (exp, EQ, EQ);
6102 break;
6103
6104 case NE_EXPR:
6105 if (integer_zerop (TREE_OPERAND (exp, 1)))
6106 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6107 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6108 == MODE_INT)
6109 &&
6110 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6111 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6112 else
6113 comparison = compare (exp, NE, NE);
6114 break;
6115
6116 case LT_EXPR:
6117 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6118 == MODE_INT)
6119 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6120 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6121 else
6122 comparison = compare (exp, LT, LTU);
6123 break;
6124
6125 case LE_EXPR:
6126 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6127 == MODE_INT)
6128 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6129 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6130 else
6131 comparison = compare (exp, LE, LEU);
6132 break;
6133
6134 case GT_EXPR:
6135 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6136 == MODE_INT)
6137 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6138 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6139 else
6140 comparison = compare (exp, GT, GTU);
6141 break;
6142
6143 case GE_EXPR:
6144 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6145 == MODE_INT)
6146 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6147 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6148 else
6149 comparison = compare (exp, GE, GEU);
6150 break;
6151
6152 default:
6153 normal:
6154 temp = expand_expr (exp, 0, VOIDmode, 0);
6155#if 0
6156 /* This is not needed any more and causes poor code since it causes
6157 comparisons and tests from non-SI objects to have different code
6158 sequences. */
6159 /* Copy to register to avoid generating bad insns by cse
6160 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6161 if (!cse_not_expected && GET_CODE (temp) == MEM)
6162 temp = copy_to_reg (temp);
6163#endif
6164 do_pending_stack_adjust ();
6165 if (GET_CODE (temp) == CONST_INT)
6166 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6167 else if (GET_CODE (temp) == LABEL_REF)
6168 comparison = const_true_rtx;
6169 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6170 && !can_compare_p (GET_MODE (temp)))
6171 /* Note swapping the labels gives us not-equal. */
6172 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6173 else if (GET_MODE (temp) != VOIDmode)
6174 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6175 NE, 1, GET_MODE (temp), 0, 0);
6176 else
6177 abort ();
6178 }
6179
6180 /* Do any postincrements in the expression that was tested. */
6181 emit_queue ();
6182
6183 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6184 straight into a conditional jump instruction as the jump condition.
6185 Otherwise, all the work has been done already. */
6186
6187 if (comparison == const_true_rtx)
6188 {
6189 if (if_true_label)
6190 emit_jump (if_true_label);
6191 }
6192 else if (comparison == const0_rtx)
6193 {
6194 if (if_false_label)
6195 emit_jump (if_false_label);
6196 }
6197 else if (comparison)
6198 do_jump_for_compare (comparison, if_false_label, if_true_label);
6199
6200 free_temp_slots ();
6201
6202 if (drop_through_label)
6203 {
6204 /* If do_jump produces code that might be jumped around,
6205 do any stack adjusts from that code, before the place
6206 where control merges in. */
6207 do_pending_stack_adjust ();
6208 emit_label (drop_through_label);
6209 }
6210}
6211\f
6212/* Given a comparison expression EXP for values too wide to be compared
6213 with one insn, test the comparison and jump to the appropriate label.
6214 The code of EXP is ignored; we always test GT if SWAP is 0,
6215 and LT if SWAP is 1. */
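/* E.g., a two-word (DImode on a 32-bit machine) `a > b' is done by the
   loop below as: if the high words compare greater, jump to the true
   label; if they differ at all (hence less), jump to the false label;
   only when they are equal fall through to the low words, which are
   always compared unsigned. */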
6216
6217static void
6218do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6219 tree exp;
6220 int swap;
6221 rtx if_false_label, if_true_label;
6222{
6223 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), 0, VOIDmode, 0);
6224 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), 0, VOIDmode, 0);
6225 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6226 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6227 rtx drop_through_label = 0;
6228 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6229 int i;
6230
6231 if (! if_true_label || ! if_false_label)
6232 drop_through_label = gen_label_rtx ();
6233 if (! if_true_label)
6234 if_true_label = drop_through_label;
6235 if (! if_false_label)
6236 if_false_label = drop_through_label;
6237
6238 /* Compare a word at a time, high order first. */
6239 for (i = 0; i < nwords; i++)
6240 {
6241 rtx comp;
6242 rtx op0_word, op1_word;
6243
6244 if (WORDS_BIG_ENDIAN)
6245 {
6246 op0_word = operand_subword_force (op0, i, mode);
6247 op1_word = operand_subword_force (op1, i, mode);
6248 }
6249 else
6250 {
6251 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6252 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6253 }
6254
6255 /* All but high-order word must be compared as unsigned. */
6256 comp = compare_from_rtx (op0_word, op1_word,
6257 (unsignedp || i > 0) ? GTU : GT,
6258 unsignedp, word_mode, 0, 0);
6259 if (comp == const_true_rtx)
6260 emit_jump (if_true_label);
6261 else if (comp != const0_rtx)
6262 do_jump_for_compare (comp, 0, if_true_label);
6263
6264 /* Consider lower words only if these are equal. */
6265 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6266 0, 0);
6267 if (comp == const_true_rtx)
6268 emit_jump (if_false_label);
6269 else if (comp != const0_rtx)
6270 do_jump_for_compare (comp, 0, if_false_label);
6271 }
6272
6273 if (if_false_label)
6274 emit_jump (if_false_label);
6275 if (drop_through_label)
6276 emit_label (drop_through_label);
6277}
6278
6279/* Given an EQ_EXPR expression EXP for values too wide to be compared
6280 with one insn, test the comparison and jump to the appropriate label. */
6281
6282static void
6283do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6284 tree exp;
6285 rtx if_false_label, if_true_label;
6286{
6287 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6288 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6289 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6290 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6291 int i;
6292 rtx drop_through_label = 0;
6293
6294 if (! if_false_label)
6295 drop_through_label = if_false_label = gen_label_rtx ();
6296
6297 for (i = 0; i < nwords; i++)
6298 {
6299 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6300 operand_subword_force (op1, i, mode),
6301 EQ, 0, word_mode, 0, 0);
6302 if (comp == const_true_rtx)
6303 emit_jump (if_false_label);
6304 else if (comp != const0_rtx)
6305 do_jump_for_compare (comp, if_false_label, 0);
6306 }
6307
6308 if (if_true_label)
6309 emit_jump (if_true_label);
6310 if (drop_through_label)
6311 emit_label (drop_through_label);
6312}
6313\f
6314/* Jump according to whether OP0 is 0.
6315 We assume that OP0 has an integer mode that is too wide
6316 for the available compare insns. */
6317
6318static void
6319do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6320 rtx op0;
6321 rtx if_false_label, if_true_label;
6322{
6323 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6324 int i;
6325 rtx drop_through_label = 0;
6326
6327 if (! if_false_label)
6328 drop_through_label = if_false_label = gen_label_rtx ();
6329
6330 for (i = 0; i < nwords; i++)
6331 {
6332 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6333 GET_MODE (op0)),
6334 const0_rtx, EQ, 0, word_mode, 0, 0);
6335 if (comp == const_true_rtx)
6336 emit_jump (if_false_label);
6337 else if (comp != const0_rtx)
6338 do_jump_for_compare (comp, if_false_label, 0);
6339 }
6340
6341 if (if_true_label)
6342 emit_jump (if_true_label);
6343 if (drop_through_label)
6344 emit_label (drop_through_label);
6345}
6346
6347/* Given a comparison expression in rtl form, output conditional branches to
6348 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6349
6350static void
6351do_jump_for_compare (comparison, if_false_label, if_true_label)
6352 rtx comparison, if_false_label, if_true_label;
6353{
6354 if (if_true_label)
6355 {
6356 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6357 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6358 else
6359 abort ();
6360
6361 if (if_false_label)
6362 emit_jump (if_false_label);
6363 }
6364 else if (if_false_label)
6365 {
6366 rtx insn;
6367 rtx prev = PREV_INSN (get_last_insn ());
6368 rtx branch = 0;
6369
6370 /* Output the branch with the opposite condition. Then try to invert
6371 what is generated. If more than one insn is a branch, or if the
6372 branch is not the last insn written, abort. If we can't invert
6373 the branch, make a true label, redirect this jump to that,
6374 emit a jump to the false label and define the true label. */
6375
6376 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6377 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6378 else
6379 abort ();
6380
6381 /* Here we get the insn before what was just emitted.
6382 On some machines, emitting the branch can discard
6383 the previous compare insn and emit a replacement. */
6384 if (prev == 0)
6385 /* If there's only one preceding insn... */
6386 insn = get_insns ();
6387 else
6388 insn = NEXT_INSN (prev);
6389
6390 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6391 if (GET_CODE (insn) == JUMP_INSN)
6392 {
6393 if (branch)
6394 abort ();
6395 branch = insn;
6396 }
6397
6398 if (branch != get_last_insn ())
6399 abort ();
6400
6401 if (! invert_jump (branch, if_false_label))
6402 {
6403 if_true_label = gen_label_rtx ();
6404 redirect_jump (branch, if_true_label);
6405 emit_jump (if_false_label);
6406 emit_label (if_true_label);
6407 }
6408 }
6409}
6410\f
6411/* Generate code for a comparison expression EXP
6412 (including code to compute the values to be compared)
6413 and set (CC0) according to the result.
6414 SIGNED_CODE should be the rtx operation for this comparison for
6415 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6416
6417 We force a stack adjustment unless there are currently
6418 things pushed on the stack that aren't yet used. */
6419
6420static rtx
6421compare (exp, signed_code, unsigned_code)
6422 register tree exp;
6423 enum rtx_code signed_code, unsigned_code;
6424{
6425 register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6426 register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6427 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6428 register enum machine_mode mode = TYPE_MODE (type);
6429 int unsignedp = TREE_UNSIGNED (type);
6430 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6431
6432 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6433 ((mode == BLKmode)
6434 ? expr_size (TREE_OPERAND (exp, 0)) : 0),
6435 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6436}
6437
6438/* Like compare but expects the values to compare as two rtx's.
6439 The decision as to signed or unsigned comparison must be made by the caller.
6440
6441 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6442 compared.
6443
6444 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6445 size of MODE should be used. */
6446
6447rtx
6448compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6449 register rtx op0, op1;
6450 enum rtx_code code;
6451 int unsignedp;
6452 enum machine_mode mode;
6453 rtx size;
6454 int align;
6455{
6456 /* If one operand is constant, make it the second one. */
6457
6458 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6459 {
6460 rtx tem = op0;
6461 op0 = op1;
6462 op1 = tem;
6463 code = swap_condition (code);
6464 }
6465
6466 if (flag_force_mem)
6467 {
6468 op0 = force_not_mem (op0);
6469 op1 = force_not_mem (op1);
6470 }
6471
6472 do_pending_stack_adjust ();
6473
6474 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6475 return simplify_relational_operation (code, mode, op0, op1);
6476
6477 /* If this is a signed equality comparison, we can do it as an
6478 unsigned comparison since zero-extension is cheaper than sign
6479 extension and comparisons with zero are done as unsigned. This is
6480 the case even on machines that can do fast sign extension, since
6481 zero-extension is easier to combine with other operations than
6482 sign-extension is. If we are comparing against a constant, we must
6483 convert it to what it would look like unsigned. */
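/* E.g., for a QImode equality comparison against -1, the constant is
   masked to (-1 & 0xff) == 255 and the comparison proceeds unsigned. */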
6484 if ((code == EQ || code == NE) && ! unsignedp
6485 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT)
6486 {
6487 if (GET_CODE (op1) == CONST_INT
6488 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6489 op1 = gen_rtx (CONST_INT, VOIDmode,
6490 INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6491 unsignedp = 1;
6492 }
6493
6494 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6495
6496 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6497}
6498\f
6499/* Generate code to calculate EXP using a store-flag instruction
6500 and return an rtx for the result. EXP is either a comparison
6501 or a TRUTH_NOT_EXPR whose operand is a comparison.
6502
6503 If TARGET is nonzero, store the result there if convenient.
6504
6505 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6506 cheap.
6507
6508 Return zero if there is no suitable set-flag instruction
6509 available on this machine.
6510
6511 Once expand_expr has been called on the arguments of the comparison,
6512 we are committed to doing the store flag, since it is not safe to
6513 re-evaluate the expression. We emit the store-flag insn by calling
6514 emit_store_flag, but only expand the arguments if we have a reason
6515 to believe that emit_store_flag will be successful. If we think that
6516 it will, but it isn't, we have to simulate the store-flag with a
6517 set/jump/set sequence. */
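/* Sketch of the simulated sequence for `target = (a < b)' when the
   store-flag insn fails (the roles of 0 and 1 swap when INVERT is set):

	target = 1;
	compare a, b;
	branch-if-less L;
	target = 0;
     L:
 */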
6518
6519static rtx
6520do_store_flag (exp, target, mode, only_cheap)
6521 tree exp;
6522 rtx target;
6523 enum machine_mode mode;
6524 int only_cheap;
6525{
6526 enum rtx_code code;
6527 tree arg0, arg1, type;
6528 tree tem;
6529 enum machine_mode operand_mode;
6530 int invert = 0;
6531 int unsignedp;
6532 rtx op0, op1;
6533 enum insn_code icode;
6534 rtx subtarget = target;
6535 rtx result, label, pattern, jump_pat;
6536
6537 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6538 result at the end. We can't simply invert the test since it would
6539 have already been inverted if it were valid. This case occurs for
6540 some floating-point comparisons. */
6541
6542 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6543 invert = 1, exp = TREE_OPERAND (exp, 0);
6544
6545 arg0 = TREE_OPERAND (exp, 0);
6546 arg1 = TREE_OPERAND (exp, 1);
6547 type = TREE_TYPE (arg0);
6548 operand_mode = TYPE_MODE (type);
6549 unsignedp = TREE_UNSIGNED (type);
6550
6551 /* We won't bother with BLKmode store-flag operations because it would mean
6552 passing a lot of information to emit_store_flag. */
6553 if (operand_mode == BLKmode)
6554 return 0;
6555
6556 while (TREE_CODE (arg0) == NON_LVALUE_EXPR)
6557 arg0 = TREE_OPERAND (arg0, 0);
6558
6559 while (TREE_CODE (arg1) == NON_LVALUE_EXPR)
6560 arg1 = TREE_OPERAND (arg1, 0);
6561
6562 /* Get the rtx comparison code to use. We know that EXP is a comparison
6563 operation of some type. Some comparisons against 1 and -1 can be
6564 converted to comparisons with zero. Do so here so that the tests
6565 below will be aware that we have a comparison with zero. These
6566 tests will not catch constants in the first operand, but constants
6567 are rarely passed as the first operand. */
6568
6569 switch (TREE_CODE (exp))
6570 {
6571 case EQ_EXPR:
6572 code = EQ;
6573 break;
6574 case NE_EXPR:
6575 code = NE;
6576 break;
6577 case LT_EXPR:
6578 if (integer_onep (arg1))
6579 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6580 else
6581 code = unsignedp ? LTU : LT;
6582 break;
6583 case LE_EXPR:
6584 if (integer_all_onesp (arg1))
6585 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6586 else
6587 code = unsignedp ? LEU : LE;
6588 break;
6589 case GT_EXPR:
6590 if (integer_all_onesp (arg1))
6591 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6592 else
6593 code = unsignedp ? GTU : GT;
6594 break;
6595 case GE_EXPR:
6596 if (integer_onep (arg1))
6597 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6598 else
6599 code = unsignedp ? GEU : GE;
6600 break;
6601 default:
6602 abort ();
6603 }
6604
6605 /* Put a constant second. */
6606 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6607 {
6608 tem = arg0; arg0 = arg1; arg1 = tem;
6609 code = swap_condition (code);
6610 }
6611
6612 /* If this is an equality or inequality test of a single bit, we can
6613 do this by shifting the bit being tested to the low-order bit and
6614 masking the result with the constant 1. If the condition was EQ,
6615 we xor it with 1. This does not require an scc insn and is faster
6616 than an scc insn even if we have it. */
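/* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and the EQ form
   `(x & 8) == 0' appends a final `xor 1'; no scc insn is needed. */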
6617
6618 if ((code == NE || code == EQ)
6619 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6620 && integer_pow2p (TREE_OPERAND (arg0, 1))
6621 && TYPE_PRECISION (type) <= HOST_BITS_PER_INT)
6622 {
6623 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6624 0, VOIDmode, 0)));
6625
6626 if (subtarget == 0 || GET_CODE (subtarget) != REG
6627 || GET_MODE (subtarget) != operand_mode
6628 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6629 subtarget = 0;
6630
6631 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6632
6633 if (bitnum != 0)
6634 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6635 size_int (bitnum), target, 1);
6636
6637 if (GET_MODE (op0) != mode)
6638 op0 = convert_to_mode (mode, op0, 1);
6639
6640 if (bitnum != TYPE_PRECISION (type) - 1)
6641 op0 = expand_and (op0, const1_rtx, target);
6642
6643 if ((code == EQ && ! invert) || (code == NE && invert))
6644 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6645 OPTAB_LIB_WIDEN);
6646
6647 return op0;
6648 }
6649
6650 /* Now see if we are likely to be able to do this. Return if not. */
6651 if (! can_compare_p (operand_mode))
6652 return 0;
6653 icode = setcc_gen_code[(int) code];
6654 if (icode == CODE_FOR_nothing
6655 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6656 {
6657 /* We can only do this if it is one of the special cases that
6658 can be handled without an scc insn. */
6659 if ((code == LT && integer_zerop (arg1))
6660 || (! only_cheap && code == GE && integer_zerop (arg1)))
6661 ;
6662 else if (BRANCH_COST >= 0
6663 && ! only_cheap && (code == NE || code == EQ)
6664 && TREE_CODE (type) != REAL_TYPE
6665 && ((abs_optab->handlers[(int) operand_mode].insn_code
6666 != CODE_FOR_nothing)
6667 || (ffs_optab->handlers[(int) operand_mode].insn_code
6668 != CODE_FOR_nothing)))
6669 ;
6670 else
6671 return 0;
6672 }
6673
6674 preexpand_calls (exp);
6675 if (subtarget == 0 || GET_CODE (subtarget) != REG
6676 || GET_MODE (subtarget) != operand_mode
6677 || ! safe_from_p (subtarget, arg1))
6678 subtarget = 0;
6679
6680 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6681 op1 = expand_expr (arg1, 0, VOIDmode, 0);
6682
6683 if (target == 0)
6684 target = gen_reg_rtx (mode);
6685
6686 result = emit_store_flag (target, code, op0, op1, operand_mode,
6687 unsignedp, 1);
6688
6689 if (result)
6690 {
6691 if (invert)
6692 result = expand_binop (mode, xor_optab, result, const1_rtx,
6693 result, 0, OPTAB_LIB_WIDEN);
6694 return result;
6695 }
6696
6697 /* If this failed, we have to do this with set/compare/jump/set code. */
6698 if (target == 0 || GET_CODE (target) != REG
6699 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6700 target = gen_reg_rtx (GET_MODE (target));
6701
6702 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6703 result = compare_from_rtx (op0, op1, code, unsignedp, operand_mode, 0, 0);
6704 if (GET_CODE (result) == CONST_INT)
6705 return (((result == const0_rtx && ! invert)
6706 || (result != const0_rtx && invert))
6707 ? const0_rtx : const1_rtx);
6708
6709 label = gen_label_rtx ();
6710 if (bcc_gen_fctn[(int) code] == 0)
6711 abort ();
6712
6713 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6714 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6715 emit_label (label);
6716
6717 return target;
6718}
6719\f
6720/* Generate a tablejump instruction (used for switch statements). */
6721
6722#ifdef HAVE_tablejump
6723
6724/* INDEX is the value being switched on, with the lowest value
6725 in the table already subtracted.
6726 MODE is its expected mode (needed if INDEX is constant).
6727 RANGE is the length of the jump table.
6728 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6729
6730 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6731 index value is out of range. */
6732
6733void
e87b4f3f 6734do_tablejump (index, mode, range, table_label, default_label)
bbf6f052 6735 rtx index, range, table_label, default_label;
e87b4f3f 6736 enum machine_mode mode;
6737{
6738 register rtx temp, vector;
6739
6740 /* Do an unsigned comparison (in the proper mode) between the index
6741 expression and the value which represents the length of the range.
6742 Since we just finished subtracting the lower bound of the range
6743 from the index expression, this comparison allows us to simultaneously
6744 check that the original index expression value is both greater than
6745 or equal to the minimum value of the range and less than or equal to
6746 the maximum value of the range. */
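 /* E.g., for case values 3..10, RANGE is 7 and 3 has already been
    subtracted from INDEX; an original value of 2 wraps to a huge
    unsigned INDEX, so the LTU test below sends it to DEFAULT_LABEL. */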
6747
6748 emit_cmp_insn (range, index, LTU, 0, mode, 0, 0);
6749 emit_jump_insn (gen_bltu (default_label));
6750
6751 /* If index is in range, it must fit in Pmode.
6752 Convert to Pmode so we can index with it. */
6753 if (mode != Pmode)
6754 index = convert_to_mode (Pmode, index, 1);
6755
6756 /* If flag_force_addr were to affect this address
6757 it could interfere with the tricky assumptions made
6758 about addresses that contain label-refs,
6759 which may be valid only very near the tablejump itself. */
6760 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6761 GET_MODE_SIZE, because this indicates how large insns are. The other
6762 uses should all be Pmode, because they are addresses. This code
6763 could fail if addresses and insns are not the same size. */
6764 index = memory_address_noforce
6765 (CASE_VECTOR_MODE,
6766 gen_rtx (PLUS, Pmode,
6767 gen_rtx (MULT, Pmode, index,
6768 gen_rtx (CONST_INT, VOIDmode,
6769 GET_MODE_SIZE (CASE_VECTOR_MODE))),
6770 gen_rtx (LABEL_REF, Pmode, table_label)));
6771 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6772 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6773 RTX_UNCHANGING_P (vector) = 1;
6774 convert_move (temp, vector, 0);
6775
6776 emit_jump_insn (gen_tablejump (temp, table_label));
6777
6778#ifndef CASE_VECTOR_PC_RELATIVE
6779 /* If we are generating PIC code or if the table is PC-relative, the
6780 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6781 if (! flag_pic)
6782 emit_barrier ();
6783#endif
6784}
6785
6786#endif /* HAVE_tablejump */