/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
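/* For example, CEIL (10, 4) is (10 + 3) / 4 == 3: integer division
   rounded up, used below to count the words occupied by a value.
   (Editor's note, not in the original source.)  */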

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
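
/* Editor's illustration (not in the original source): a caller expanding
   a post-increment follows the discipline described above, roughly

     rtx op = protect_from_queue (x, 0);
     ... emit insns that read OP ...
     emit_queue ();

   so the queued increment of X is emitted only after every use of the
   pre-increment value.  A sketch of the calling convention only.  */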

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
	   && ! MEM_VOLATILE_P (from)
	   && direct_load[(int) to_mode]
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == REG
	  || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* For extension, try a direct conversion insn, or else search for an
     intermediate mode to convert via.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
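
/* Editor's illustration (not in the original source): assuming a target
   where SImode is a word,

     rtx to = gen_reg_rtx (DImode);
     rtx from = gen_reg_rtx (SImode);
     convert_move (to, from, 1);

   emits a zero-extension, done directly, via a full word, or word by
   word, whichever the target supports.  A sketch only.  */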

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && (((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
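
/* Editor's illustration (not in the original source):

     rtx narrow = convert_to_mode (QImode, x, 0);

   returns X itself if it is already QImode, a low part of X when a
   simple narrowing suffices, or otherwise a fresh pseudo initialized
   via convert_move.  A sketch only.  */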
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
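
/* Editor's illustration (not in the original source): on a 32-bit target
   with MOVE_MAX == 4 and full alignment, L == 7 costs one SImode move
   (7 / 4 == 1, remainder 3), one HImode move (3 / 2 == 1, remainder 1),
   and one QImode move: 3 insns in all.  This count is what the
   MOVE_RATIO comparisons below use to choose between move_by_pieces and
   a block-move insn or libcall.  */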

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
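
/* Editor's illustration (not in the original source): for a 16-byte
   block known to be word-aligned,

     emit_block_move (dest_mem, src_mem, GEN_INT (16), 4);

   four word moves beat the default MOVE_RATIO, so move_by_pieces wins;
   larger or variable sizes fall through to a movstr pattern or the
   memcpy/bcopy libcall.  DEST_MEM and SRC_MEM stand for hypothetical
   BLKmode MEMs; a sketch of the interface only.  */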
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block: add a REG_LIBCALL
   note to the first insn after PREV and a REG_RETVAL note to the most
   recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
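
/* Editor's illustration (not in the original source): zeroing a 32-byte
   BLKmode aggregate,

     clear_storage (obj_mem, 32);

   emits a memset (or bzero) libcall, while a non-BLK object is simply
   assigned const0_rtx.  OBJ_MEM is a hypothetical operand; a sketch of
   the interface only.  */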

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
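
/* Editor's illustration (not in the original source): on a 32-bit target
   with no DImode move pattern,

     emit_move_insn (gen_reg_rtx (DImode), dreg);

   falls into the multi-word case above and emits two SImode word moves,
   grouped as one libcall block.  DREG stands for a hypothetical DImode
   operand; a sketch only.  */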
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
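
/* Editor's illustration (not in the original source):

     rtx addr = push_block (GEN_INT (24), 0, 0);

   adjusts the stack by 24 bytes and yields an address for the start of
   the newly pushed block.  A sketch only.  */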

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
1575
1576/* Generate code to push X onto the stack, assuming it has mode MODE and
1577 type TYPE.
1578 MODE is redundant except when X is a CONST_INT (since they don't
1579 carry mode info).
1580 SIZE is an rtx for the size of data to be copied (in bytes),
1581 needed only if X is BLKmode.
1582
1583 ALIGN (in bytes) is maximum alignment we can assume.
1584
1585 If PARTIAL is nonzero, then copy that many of the first words
1586 of X into registers starting with REG, and push the rest of X.
1587 The amount of space pushed is decreased by PARTIAL words,
1588 rounded *down* to a multiple of PARM_BOUNDARY.
1589 REG must be a hard register in this case.
1590
1591 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1592 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1593
1594 On a machine that lacks real push insns, ARGS_ADDR is the address of
1595 the bottom of the argument block for this call. We use indexing off there
1596 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1597 argument block has not been preallocated.
1598
1599 ARGS_SO_FAR is the size of args previously pushed for this call. */
1600
1601void
1602emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1603 args_addr, args_so_far)
1604 register rtx x;
1605 enum machine_mode mode;
1606 tree type;
1607 rtx size;
1608 int align;
1609 int partial;
1610 rtx reg;
1611 int extra;
1612 rtx args_addr;
1613 rtx args_so_far;
1614{
1615 rtx xinner;
1616 enum direction stack_direction
1617#ifdef STACK_GROWS_DOWNWARD
1618 = downward;
1619#else
1620 = upward;
1621#endif
1622
1623 /* Decide where to pad the argument: `downward' for below,
1624 `upward' for above, or `none' for don't pad it.
1625 Default is below for small data on big-endian machines; else above. */
1626 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1627
1628 /* Invert direction if stack is post-update. */
1629 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1630 if (where_pad != none)
1631 where_pad = (where_pad == downward ? upward : downward);
1632
1633 xinner = x = protect_from_queue (x, 0);
1634
1635 if (mode == BLKmode)
1636 {
1637 /* Copy a block into the stack, entirely or partially. */
1638
1639 register rtx temp;
1640 int used = partial * UNITS_PER_WORD;
1641 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1642 int skip;
1643
1644 if (size == 0)
1645 abort ();
1646
1647 used -= offset;
1648
1649 /* USED is now the # of bytes we need not copy to the stack
1650 because registers will take care of them. */
1651
1652 if (partial != 0)
1653 xinner = change_address (xinner, BLKmode,
1654 plus_constant (XEXP (xinner, 0), used));
1655
1656 /* If the partial register-part of the arg counts in its stack size,
1657 skip the part of stack space corresponding to the registers.
1658 Otherwise, start copying to the beginning of the stack space,
1659 by setting SKIP to 0. */
1660#ifndef REG_PARM_STACK_SPACE
1661 skip = 0;
1662#else
1663 skip = used;
1664#endif
1665
1666#ifdef PUSH_ROUNDING
1667 /* Do it with several push insns if that doesn't take lots of insns
1668 and if there is no difficulty with push insns that skip bytes
1669 on the stack for alignment purposes. */
1670 if (args_addr == 0
1671 && GET_CODE (size) == CONST_INT
1672 && skip == 0
1673 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1674 < MOVE_RATIO)
bbf6f052
RK
1675 /* Here we avoid the case of a structure whose weak alignment
1676 forces many pushes of a small amount of data,
1677 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
1678 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1679 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1680 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
1681 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1682 {
1683 /* Push padding now if padding above and stack grows down,
1684 or if padding below and stack grows up.
1685 But if space already allocated, this has already been done. */
1686 if (extra && args_addr == 0
1687 && where_pad != none && where_pad != stack_direction)
906c4e36 1688 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
1689
1690 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1691 INTVAL (size) - used, align);
1692 }
1693 else
1694#endif /* PUSH_ROUNDING */
1695 {
1696 /* Otherwise make space on the stack and copy the data
1697 to the address of that space. */
1698
1699 /* Deduct words put into registers from the size we must copy. */
1700 if (partial != 0)
1701 {
1702 if (GET_CODE (size) == CONST_INT)
906c4e36 1703 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
1704 else
1705 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
1706 GEN_INT (used), NULL_RTX, 0,
1707 OPTAB_LIB_WIDEN);
bbf6f052
RK
1708 }
1709
1710 /* Get the address of the stack space.
1711 In this case, we do not deal with EXTRA separately.
1712 A single stack adjust will do. */
1713 if (! args_addr)
1714 {
1715 temp = push_block (size, extra, where_pad == downward);
1716 extra = 0;
1717 }
1718 else if (GET_CODE (args_so_far) == CONST_INT)
1719 temp = memory_address (BLKmode,
1720 plus_constant (args_addr,
1721 skip + INTVAL (args_so_far)));
1722 else
1723 temp = memory_address (BLKmode,
1724 plus_constant (gen_rtx (PLUS, Pmode,
1725 args_addr, args_so_far),
1726 skip));
1727
1728 /* TEMP is the address of the block. Copy the data there. */
1729 if (GET_CODE (size) == CONST_INT
1730 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1731 < MOVE_RATIO))
1732 {
1733 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1734 INTVAL (size), align);
1735 goto ret;
1736 }
1737 /* Try the most limited insn first, because there's no point
1738 including more than one in the machine description unless
1739 the more limited one has some advantage. */
1740#ifdef HAVE_movstrqi
1741 if (HAVE_movstrqi
1742 && GET_CODE (size) == CONST_INT
1743 && ((unsigned) INTVAL (size)
1744 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1745 {
1746 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1747 xinner, size, GEN_INT (align)));
bbf6f052
RK
1748 goto ret;
1749 }
1750#endif
1751#ifdef HAVE_movstrhi
1752 if (HAVE_movstrhi
1753 && GET_CODE (size) == CONST_INT
1754 && ((unsigned) INTVAL (size)
1755 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1756 {
1757 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1758 xinner, size, GEN_INT (align)));
bbf6f052
RK
1759 goto ret;
1760 }
1761#endif
1762#ifdef HAVE_movstrsi
1763 if (HAVE_movstrsi)
1764 {
1765 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1766 xinner, size, GEN_INT (align)));
bbf6f052
RK
1767 goto ret;
1768 }
1769#endif
1770#ifdef HAVE_movstrdi
1771 if (HAVE_movstrdi)
1772 {
1773 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1774 xinner, size, GEN_INT (align)));
bbf6f052
RK
1775 goto ret;
1776 }
1777#endif
1778
1779#ifndef ACCUMULATE_OUTGOING_ARGS
1780 /* If the source is referenced relative to the stack pointer,
1781 copy it to another register to stabilize it. We do not need
1782 to do this if we know that we won't be changing sp. */
1783
1784 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1785 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1786 temp = copy_to_reg (temp);
1787#endif
1788
1789 /* Make inhibit_defer_pop nonzero around the library call
1790 to force it to pop the bcopy-arguments right away. */
1791 NO_DEFER_POP;
1792#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1793 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1794 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1795 size, Pmode);
1796#else
d562e42e 1797 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1798 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1799 size, Pmode);
1800#endif
1801 OK_DEFER_POP;
1802 }
1803 }
1804 else if (partial > 0)
1805 {
1806 /* Scalar partly in registers. */
1807
1808 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1809 int i;
1810 int not_stack;
1811 /* # words of start of argument
1812 that we must make space for but need not store. */
1813 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1814 int args_offset = INTVAL (args_so_far);
1815 int skip;
1816
1817 /* Push padding now if padding above and stack grows down,
1818 or if padding below and stack grows up.
1819 But if space already allocated, this has already been done. */
1820 if (extra && args_addr == 0
1821 && where_pad != none && where_pad != stack_direction)
906c4e36 1822 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
1823
1824 /* If we make space by pushing it, we might as well push
1825 the real data. Otherwise, we can leave OFFSET nonzero
1826 and leave the space uninitialized. */
1827 if (args_addr == 0)
1828 offset = 0;
1829
1830 /* Now NOT_STACK gets the number of words that we don't need to
1831 allocate on the stack. */
1832 not_stack = partial - offset;
1833
1834 /* If the partial register-part of the arg counts in its stack size,
1835 skip the part of stack space corresponding to the registers.
1836 Otherwise, start copying to the beginning of the stack space,
1837 by setting SKIP to 0. */
1838#ifndef REG_PARM_STACK_SPACE
1839 skip = 0;
1840#else
1841 skip = not_stack;
1842#endif
1843
1844 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1845 x = validize_mem (force_const_mem (mode, x));
1846
1847 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1848 SUBREGs of such registers are not allowed. */
1849 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1850 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1851 x = copy_to_reg (x);
1852
1853 /* Loop over all the words allocated on the stack for this arg. */
1854 /* We can do it by words, because any scalar bigger than a word
1855 has a size a multiple of a word. */
1856#ifndef PUSH_ARGS_REVERSED
1857 for (i = not_stack; i < size; i++)
1858#else
1859 for (i = size - 1; i >= not_stack; i--)
1860#endif
1861 if (i >= not_stack + offset)
1862 emit_push_insn (operand_subword_force (x, i, mode),
1863 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1864 0, args_addr,
1865 GEN_INT (args_offset + ((i - not_stack + skip)
1866 * UNITS_PER_WORD)));
1867 }
1868 else
1869 {
1870 rtx addr;
1871
1872 /* Push padding now if padding above and stack grows down,
1873 or if padding below and stack grows up.
1874 But if space already allocated, this has already been done. */
1875 if (extra && args_addr == 0
1876 && where_pad != none && where_pad != stack_direction)
 1877	anti_adjust_stack (GEN_INT (extra));
1878
1879#ifdef PUSH_ROUNDING
1880 if (args_addr == 0)
1881 addr = gen_push_operand ();
1882 else
1883#endif
1884 if (GET_CODE (args_so_far) == CONST_INT)
1885 addr
1886 = memory_address (mode,
1887 plus_constant (args_addr, INTVAL (args_so_far)));
1888 else
1889 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1890 args_so_far));
1891
1892 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1893 }
1894
1895 ret:
1896 /* If part should go in registers, copy that part
1897 into the appropriate registers. Do this now, at the end,
1898 since mem-to-mem copies above may do function calls. */
1899 if (partial > 0)
1900 move_block_to_reg (REGNO (reg), x, partial, mode);
1901
1902 if (extra && args_addr == 0 && where_pad == stack_direction)
 1903    anti_adjust_stack (GEN_INT (extra));
1904}
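
/* Illustrative sketch, not part of the original source: the simplest
   use of emit_push_insn above, pushing one word with no partial
   register part and no preallocated argument block (ARGS_ADDR == 0),
   mirroring the recursive call in the partial-register case.  The
   variables `val', `some_align' and `arg_offset' are hypothetical.  */
#if 0
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX, some_align,
		  0,			/* no part of the arg in registers */
		  NULL_RTX, 0,		/* no reg, no extra padding */
		  0,			/* args_addr == 0: a genuine push */
		  GEN_INT (arg_offset));
#endif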
1905\f
1906/* Output a library call to function FUN (a SYMBOL_REF rtx)
1907 (emitting the queue unless NO_QUEUE is nonzero),
1908 for a value of mode OUTMODE,
1909 with NARGS different arguments, passed as alternating rtx values
1910 and machine_modes to convert them to.
1911 The rtx values should have been passed through protect_from_queue already.
1912
1913 NO_QUEUE will be true if and only if the library call is a `const' call
1914 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1915 to the variable is_const in expand_call.
1916
1917 NO_QUEUE must be true for const calls, because if it isn't, then
1918 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1919 and will be lost if the libcall sequence is optimized away.
1920
1921 NO_QUEUE must be false for non-const calls, because if it isn't, the
1922 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1923 optimized. For instance, the instruction scheduler may incorrectly
1924 move memory references across the non-const call. */
1925
1926void
1927emit_library_call (va_alist)
1928 va_dcl
1929{
1930 va_list p;
1931 struct args_size args_size;
1932 register int argnum;
1933 enum machine_mode outmode;
1934 int nargs;
1935 rtx fun;
1936 rtx orgfun;
1937 int inc;
1938 int count;
1939 rtx argblock = 0;
1940 CUMULATIVE_ARGS args_so_far;
1941 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1942 struct args_size offset; struct args_size size; };
1943 struct arg *argvec;
1944 int old_inhibit_defer_pop = inhibit_defer_pop;
1945 int no_queue = 0;
1946 rtx use_insns;
1947
1948 va_start (p);
1949 orgfun = fun = va_arg (p, rtx);
1950 no_queue = va_arg (p, int);
1951 outmode = va_arg (p, enum machine_mode);
1952 nargs = va_arg (p, int);
1953
1954 /* Copy all the libcall-arguments out of the varargs data
1955 and into a vector ARGVEC.
1956
1957 Compute how to pass each argument. We only support a very small subset
1958 of the full argument passing conventions to limit complexity here since
1959 library functions shouldn't have many args. */
1960
1961 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1962
1963 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1964
1965 args_size.constant = 0;
1966 args_size.var = 0;
1967
1968 for (count = 0; count < nargs; count++)
1969 {
1970 rtx val = va_arg (p, rtx);
1971 enum machine_mode mode = va_arg (p, enum machine_mode);
1972
1973 /* We cannot convert the arg value to the mode the library wants here;
1974 must do it earlier where we know the signedness of the arg. */
1975 if (mode == BLKmode
1976 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1977 abort ();
1978
1979 /* On some machines, there's no way to pass a float to a library fcn.
1980 Pass it as a double instead. */
1981#ifdef LIBGCC_NEEDS_DOUBLE
1982 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
 1983	val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1984#endif
1985
1986 /* There's no need to call protect_from_queue, because
1987 either emit_move_insn or emit_push_insn will do that. */
1988
1989 /* Make sure it is a reasonable operand for a move or push insn. */
1990 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1991 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
 1992	val = force_operand (val, NULL_RTX);
1993
1994 argvec[count].value = val;
1995 argvec[count].mode = mode;
1996
1997#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
 1998      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1999 abort ();
2000#endif
2001
 2002      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2003 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2004 abort ();
2005#ifdef FUNCTION_ARG_PARTIAL_NREGS
2006 argvec[count].partial
 2007	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2008#else
2009 argvec[count].partial = 0;
2010#endif
2011
 2012      locate_and_pad_parm (mode, NULL_TREE,
 2013			   argvec[count].reg && argvec[count].partial == 0,
 2014			   NULL_TREE, &args_size, &argvec[count].offset,
2015 &argvec[count].size);
2016
2017 if (argvec[count].size.var)
2018 abort ();
2019
2020#ifndef REG_PARM_STACK_SPACE
2021 if (argvec[count].partial)
2022 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2023#endif
2024
2025 if (argvec[count].reg == 0 || argvec[count].partial != 0
2026#ifdef REG_PARM_STACK_SPACE
2027 || 1
2028#endif
2029 )
2030 args_size.constant += argvec[count].size.constant;
2031
2032#ifdef ACCUMULATE_OUTGOING_ARGS
2033 /* If this arg is actually passed on the stack, it might be
2034 clobbering something we already put there (this library call might
2035 be inside the evaluation of an argument to a function whose call
2036 requires the stack). This will only occur when the library call
2037 has sufficient args to run out of argument registers. Abort in
2038 this case; if this ever occurs, code must be added to save and
2039 restore the arg slot. */
2040
2041 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2042 abort ();
2043#endif
2044
2045 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2046 }
2047 va_end (p);
2048
2049 /* If this machine requires an external definition for library
2050 functions, write one out. */
2051 assemble_external_libcall (fun);
2052
2053#ifdef STACK_BOUNDARY
2054 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2055 / STACK_BYTES) * STACK_BYTES);
2056#endif
2057
2058#ifdef REG_PARM_STACK_SPACE
2059 args_size.constant = MAX (args_size.constant,
2060 REG_PARM_STACK_SPACE ((tree) 0));
2061#endif
2062
2063#ifdef ACCUMULATE_OUTGOING_ARGS
2064 if (args_size.constant > current_function_outgoing_args_size)
2065 current_function_outgoing_args_size = args_size.constant;
2066 args_size.constant = 0;
2067#endif
2068
2069#ifndef PUSH_ROUNDING
 2070  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2071#endif
2072
2073#ifdef PUSH_ARGS_REVERSED
2074 inc = -1;
2075 argnum = nargs - 1;
2076#else
2077 inc = 1;
2078 argnum = 0;
2079#endif
2080
2081 /* Push the args that need to be pushed. */
2082
2083 for (count = 0; count < nargs; count++, argnum += inc)
2084 {
2085 register enum machine_mode mode = argvec[argnum].mode;
2086 register rtx val = argvec[argnum].value;
2087 rtx reg = argvec[argnum].reg;
2088 int partial = argvec[argnum].partial;
2089
2090 if (! (reg != 0 && partial == 0))
2091 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2092 argblock, GEN_INT (argvec[count].offset.constant));
2093 NO_DEFER_POP;
2094 }
2095
2096#ifdef PUSH_ARGS_REVERSED
2097 argnum = nargs - 1;
2098#else
2099 argnum = 0;
2100#endif
2101
2102 /* Now load any reg parms into their regs. */
2103
2104 for (count = 0; count < nargs; count++, argnum += inc)
2105 {
2106 register enum machine_mode mode = argvec[argnum].mode;
2107 register rtx val = argvec[argnum].value;
2108 rtx reg = argvec[argnum].reg;
2109 int partial = argvec[argnum].partial;
2110
2111 if (reg != 0 && partial == 0)
2112 emit_move_insn (reg, val);
2113 NO_DEFER_POP;
2114 }
2115
2116 /* For version 1.37, try deleting this entirely. */
2117 if (! no_queue)
2118 emit_queue ();
2119
2120 /* Any regs containing parms remain in use through the call. */
2121 start_sequence ();
2122 for (count = 0; count < nargs; count++)
2123 if (argvec[count].reg != 0)
2124 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2125
2126 use_insns = get_insns ();
2127 end_sequence ();
2128
 2129  fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2130
2131 /* Don't allow popping to be deferred, since then
2132 cse'ing of library calls could delete a call and leave the pop. */
2133 NO_DEFER_POP;
2134
2135 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2136 will set inhibit_defer_pop to that value. */
2137
2138 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2139 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
 2140	       outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2141 old_inhibit_defer_pop + 1, use_insns, no_queue);
2142
2143 /* Now restore inhibit_defer_pop to its actual original value. */
2144 OK_DEFER_POP;
2145}
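
/* Illustrative sketch, not part of the original source: the varargs
   convention of emit_library_call as used elsewhere in this file.
   Each argument rtx is followed by the machine mode to pass it in;
   `dst', `src' and `len' are hypothetical Pmode operands.  */
#if 0
  emit_library_call (memcpy_libfunc, 0,	/* not a `const' call */
		     VOIDmode, 3,	/* no return value, three args */
		     dst, Pmode, src, Pmode, len, Pmode);
#endif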
2146\f
2147/* Expand an assignment that stores the value of FROM into TO.
2148 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2149 (This may contain a QUEUED rtx.)
2150 Otherwise, the returned value is not meaningful.
2151
2152 SUGGEST_REG is no longer actually used.
2153 It used to mean, copy the value through a register
2154 and return that register, if that is possible.
2155 But now we do this if WANT_VALUE.
2156
2157 If the value stored is a constant, we return the constant. */
2158
2159rtx
2160expand_assignment (to, from, want_value, suggest_reg)
2161 tree to, from;
2162 int want_value;
2163 int suggest_reg;
2164{
2165 register rtx to_rtx = 0;
2166 rtx result;
2167
2168 /* Don't crash if the lhs of the assignment was erroneous. */
2169
2170 if (TREE_CODE (to) == ERROR_MARK)
 2171    return expand_expr (from, NULL_RTX, VOIDmode, 0);
2172
2173 /* Assignment of a structure component needs special treatment
2174 if the structure component's rtx is not simply a MEM.
2175 Assignment of an array element at a constant index
2176 has the same problem. */
2177
2178 if (TREE_CODE (to) == COMPONENT_REF
2179 || TREE_CODE (to) == BIT_FIELD_REF
2180 || (TREE_CODE (to) == ARRAY_REF
2181 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2182 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2183 {
2184 enum machine_mode mode1;
2185 int bitsize;
2186 int bitpos;
 2187      tree offset;
2188 int unsignedp;
2189 int volatilep = 0;
 2190      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2191 &mode1, &unsignedp, &volatilep);
2192
2193 /* If we are going to use store_bit_field and extract_bit_field,
2194 make sure to_rtx will be safe for multiple use. */
2195
2196 if (mode1 == VOIDmode && want_value)
2197 tem = stabilize_reference (tem);
2198
 2199      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2200 if (offset != 0)
2201 {
 2202	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2203
2204 if (GET_CODE (to_rtx) != MEM)
2205 abort ();
2206 to_rtx = change_address (to_rtx, VOIDmode,
2207 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2208 force_reg (Pmode, offset_rtx)));
2209 }
2210 if (volatilep)
2211 {
2212 if (GET_CODE (to_rtx) == MEM)
2213 MEM_VOLATILE_P (to_rtx) = 1;
2214#if 0 /* This was turned off because, when a field is volatile
2215 in an object which is not volatile, the object may be in a register,
2216 and then we would abort over here. */
2217 else
2218 abort ();
2219#endif
2220 }
2221
2222 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2223 (want_value
2224 /* Spurious cast makes HPUX compiler happy. */
2225 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2226 : VOIDmode),
2227 unsignedp,
2228 /* Required alignment of containing datum. */
2229 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2230 int_size_in_bytes (TREE_TYPE (tem)));
2231 preserve_temp_slots (result);
2232 free_temp_slots ();
2233
2234 return result;
2235 }
2236
2237 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2238 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2239
2240 if (to_rtx == 0)
906c4e36 2241 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2242
2243 /* In case we are returning the contents of an object which overlaps
2244 the place the value is being stored, use a safe function when copying
2245 a value through a pointer into a structure value return block. */
2246 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2247 && current_function_returns_struct
2248 && !current_function_returns_pcc_struct)
2249 {
 2250      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2251 rtx size = expr_size (from);
2252
2253#ifdef TARGET_MEM_FUNCTIONS
 2254      emit_library_call (memcpy_libfunc, 0,
2255 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2256 XEXP (from_rtx, 0), Pmode,
2257 size, Pmode);
2258#else
 2259      emit_library_call (bcopy_libfunc, 0,
2260 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2261 XEXP (to_rtx, 0), Pmode,
2262 size, Pmode);
2263#endif
2264
2265 preserve_temp_slots (to_rtx);
2266 free_temp_slots ();
2267 return to_rtx;
2268 }
2269
2270 /* Compute FROM and store the value in the rtx we got. */
2271
2272 result = store_expr (from, to_rtx, want_value);
2273 preserve_temp_slots (result);
2274 free_temp_slots ();
2275 return result;
2276}
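
/* Illustrative sketch, not part of the original source: how a caller
   might use expand_assignment.  `lhs' and `rhs' are hypothetical
   trees for an assignment `lhs = rhs'.  */
#if 0
  /* As a statement, the stored value is not wanted.  */
  expand_assignment (lhs, rhs, 0, 0);
  /* In an expression context, ask for an rtx for the value stored.  */
  temp = expand_assignment (lhs, rhs, 1, 0);
#endif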
2277
2278/* Generate code for computing expression EXP,
2279 and storing the value into TARGET.
2280 Returns TARGET or an equivalent value.
2281 TARGET may contain a QUEUED rtx.
2282
2283 If SUGGEST_REG is nonzero, copy the value through a register
2284 and return that register, if that is possible.
2285
2286 If the value stored is a constant, we return the constant. */
2287
2288rtx
2289store_expr (exp, target, suggest_reg)
2290 register tree exp;
2291 register rtx target;
2292 int suggest_reg;
2293{
2294 register rtx temp;
2295 int dont_return_target = 0;
2296
2297 if (TREE_CODE (exp) == COMPOUND_EXPR)
2298 {
2299 /* Perform first part of compound expression, then assign from second
2300 part. */
2301 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2302 emit_queue ();
2303 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2304 }
2305 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2306 {
2307 /* For conditional expression, get safe form of the target. Then
2308 test the condition, doing the appropriate assignment on either
2309 side. This avoids the creation of unnecessary temporaries.
2310 For non-BLKmode, it is more efficient not to do this. */
2311
2312 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2313
2314 emit_queue ();
2315 target = protect_from_queue (target, 1);
2316
2317 NO_DEFER_POP;
2318 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2319 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2320 emit_queue ();
2321 emit_jump_insn (gen_jump (lab2));
2322 emit_barrier ();
2323 emit_label (lab1);
2324 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2325 emit_queue ();
2326 emit_label (lab2);
2327 OK_DEFER_POP;
2328 return target;
2329 }
2330 else if (suggest_reg && GET_CODE (target) == MEM
2331 && GET_MODE (target) != BLKmode)
2332 /* If target is in memory and caller wants value in a register instead,
2333 arrange that. Pass TARGET as target for expand_expr so that,
2334 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2335 We know expand_expr will not use the target in that case. */
2336 {
 2337      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2338 GET_MODE (target), 0);
2339 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2340 temp = copy_to_reg (temp);
2341 dont_return_target = 1;
2342 }
2343 else if (queued_subexp_p (target))
2344 /* If target contains a postincrement, it is not safe
2345 to use as the returned value. It would access the wrong
2346 place by the time the queued increment gets output.
2347 So copy the value through a temporary and use that temp
2348 as the result. */
2349 {
2350 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2351 {
2352 /* Expand EXP into a new pseudo. */
2353 temp = gen_reg_rtx (GET_MODE (target));
2354 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2355 }
2356 else
 2357	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2358 dont_return_target = 1;
2359 }
2360 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 2361    /* If this is a scalar in a register that is stored in a wider mode
2362 than the declared mode, compute the result into its declared mode
2363 and then convert to the wider mode. Our value is the computed
2364 expression. */
2365 {
2366 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2367 convert_move (SUBREG_REG (target), temp,
2368 SUBREG_PROMOTED_UNSIGNED_P (target));
2369 return temp;
2370 }
2371 else
2372 {
2373 temp = expand_expr (exp, target, GET_MODE (target), 0);
2374 /* DO return TARGET if it's a specified hardware register.
2375 expand_return relies on this. */
2376 if (!(target && GET_CODE (target) == REG
2377 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2378 && CONSTANT_P (temp))
2379 dont_return_target = 1;
2380 }
2381
2382 /* If value was not generated in the target, store it there.
 2383     Convert the value to TARGET's type first if necessary. */
2384
2385 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2386 {
2387 target = protect_from_queue (target, 1);
2388 if (GET_MODE (temp) != GET_MODE (target)
2389 && GET_MODE (temp) != VOIDmode)
2390 {
2391 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2392 if (dont_return_target)
2393 {
2394 /* In this case, we will return TEMP,
2395 so make sure it has the proper mode.
2396 But don't forget to store the value into TARGET. */
2397 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2398 emit_move_insn (target, temp);
2399 }
2400 else
2401 convert_move (target, temp, unsignedp);
2402 }
2403
2404 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2405 {
2406 /* Handle copying a string constant into an array.
2407 The string constant may be shorter than the array.
2408 So copy just the string's actual length, and clear the rest. */
2409 rtx size;
2410
2411 /* Get the size of the data type of the string,
2412 which is actually the size of the target. */
2413 size = expr_size (exp);
2414 if (GET_CODE (size) == CONST_INT
2415 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2416 emit_block_move (target, temp, size,
2417 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2418 else
 2419	{
2420 /* Compute the size of the data to copy from the string. */
2421 tree copy_size
2422 = fold (build (MIN_EXPR, sizetype,
2423 size_binop (CEIL_DIV_EXPR,
2424 TYPE_SIZE (TREE_TYPE (exp)),
2425 size_int (BITS_PER_UNIT)),
2426 convert (sizetype,
2427 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2428 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2429 VOIDmode, 0);
2430 rtx label = 0;
2431
2432 /* Copy that much. */
2433 emit_block_move (target, temp, copy_size_rtx,
2434 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2435
2436 /* Figure out how much is left in TARGET
2437 that we have to clear. */
2438 if (GET_CODE (copy_size_rtx) == CONST_INT)
2439 {
2440 temp = plus_constant (XEXP (target, 0),
2441 TREE_STRING_LENGTH (exp));
2442 size = plus_constant (size,
2443 - TREE_STRING_LENGTH (exp));
2444 }
2445 else
2446 {
2447 enum machine_mode size_mode = Pmode;
2448
2449 temp = force_reg (Pmode, XEXP (target, 0));
2450 temp = expand_binop (size_mode, add_optab, temp,
2451 copy_size_rtx, NULL_RTX, 0,
2452 OPTAB_LIB_WIDEN);
2453
2454 size = expand_binop (size_mode, sub_optab, size,
2455 copy_size_rtx, NULL_RTX, 0,
2456 OPTAB_LIB_WIDEN);
 2457
 2458	      emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2459 GET_MODE (size), 0, 0);
2460 label = gen_label_rtx ();
2461 emit_jump_insn (gen_blt (label));
2462 }
2463
2464 if (size != const0_rtx)
2465 {
 2466#ifdef TARGET_MEM_FUNCTIONS
 2467		emit_library_call (memset_libfunc, 0, VOIDmode, 3,
 2468				   temp, Pmode, const0_rtx, Pmode, size, Pmode);
 2469#else
 2470		emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
 2471				   temp, Pmode, size, Pmode);
 2472#endif
2473 }
2474 if (label)
2475 emit_label (label);
2476 }
2477 }
2478 else if (GET_MODE (temp) == BLKmode)
2479 emit_block_move (target, temp, expr_size (exp),
2480 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2481 else
2482 emit_move_insn (target, temp);
2483 }
2484 if (dont_return_target)
2485 return temp;
2486 return target;
2487}
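
/* Illustrative sketch, not part of the original source: store_expr is
   the workhorse behind expand_assignment above; given a target rtx it
   computes EXP directly into it when possible.  `exp' and `to_rtx'
   are hypothetical.  */
#if 0
  temp = store_expr (exp, to_rtx, 0);	/* 0: no register copy wanted */
#endif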
2488\f
2489/* Store the value of constructor EXP into the rtx TARGET.
2490 TARGET is either a REG or a MEM. */
2491
2492static void
2493store_constructor (exp, target)
2494 tree exp;
2495 rtx target;
2496{
2497 tree type = TREE_TYPE (exp);
2498
2499 /* We know our target cannot conflict, since safe_from_p has been called. */
2500#if 0
2501 /* Don't try copying piece by piece into a hard register
2502 since that is vulnerable to being clobbered by EXP.
2503 Instead, construct in a pseudo register and then copy it all. */
2504 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2505 {
2506 rtx temp = gen_reg_rtx (GET_MODE (target));
2507 store_constructor (exp, temp);
2508 emit_move_insn (target, temp);
2509 return;
2510 }
2511#endif
2512
 2513  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2514 {
2515 register tree elt;
2516
2517 /* Inform later passes that the whole union value is dead. */
2518 if (TREE_CODE (type) == UNION_TYPE)
 2519	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2520
2521 /* If we are building a static constructor into a register,
2522 set the initial value as zero so we can fold the value into
2523 a constant. */
2524 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2525 emit_move_insn (target, const0_rtx);
2526
2527 /* If the constructor has fewer fields than the structure,
2528 clear the whole structure first. */
2529 else if (list_length (CONSTRUCTOR_ELTS (exp))
2530 != list_length (TYPE_FIELDS (type)))
2531 clear_storage (target, int_size_in_bytes (type));
2532 else
2533 /* Inform later passes that the old value is dead. */
2534 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2535
2536 /* Store each element of the constructor into
2537 the corresponding field of TARGET. */
2538
2539 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2540 {
2541 register tree field = TREE_PURPOSE (elt);
2542 register enum machine_mode mode;
2543 int bitsize;
2544 int bitpos;
2545 int unsignedp;
2546
2547 /* Just ignore missing fields.
2548 We cleared the whole structure, above,
2549 if any fields are missing. */
2550 if (field == 0)
2551 continue;
2552
2553 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2554 unsignedp = TREE_UNSIGNED (field);
2555 mode = DECL_MODE (field);
2556 if (DECL_BIT_FIELD (field))
2557 mode = VOIDmode;
2558
2559 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2560 /* ??? This case remains to be written. */
2561 abort ();
2562
2563 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2564
2565 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2566 /* The alignment of TARGET is
2567 at least what its type requires. */
2568 VOIDmode, 0,
2569 TYPE_ALIGN (type) / BITS_PER_UNIT,
2570 int_size_in_bytes (type));
2571 }
2572 }
 2573  else if (TREE_CODE (type) == ARRAY_TYPE)
2574 {
2575 register tree elt;
2576 register int i;
 2577      tree domain = TYPE_DOMAIN (type);
2578 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2579 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
 2580      tree elttype = TREE_TYPE (type);
2581
2582 /* If the constructor has fewer fields than the structure,
 2583	 clear the whole structure first.  Similarly if this is a
 2584	 static constructor of a non-BLKmode object. */
 2585
2586 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2587 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2588 clear_storage (target, maxelt - minelt + 1);
2589 else
2590 /* Inform later passes that the old value is dead. */
2591 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2592
2593 /* Store each element of the constructor into
2594 the corresponding element of TARGET, determined
2595 by counting the elements. */
2596 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2597 elt;
2598 elt = TREE_CHAIN (elt), i++)
2599 {
2600 register enum machine_mode mode;
2601 int bitsize;
2602 int bitpos;
2603 int unsignedp;
2604
2605 mode = TYPE_MODE (elttype);
2606 bitsize = GET_MODE_BITSIZE (mode);
2607 unsignedp = TREE_UNSIGNED (elttype);
2608
2609 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2610
2611 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2612 /* The alignment of TARGET is
2613 at least what its type requires. */
2614 VOIDmode, 0,
2615 TYPE_ALIGN (type) / BITS_PER_UNIT,
2616 int_size_in_bytes (type));
2617 }
2618 }
2619
2620 else
2621 abort ();
2622}
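
/* Illustrative sketch, not part of the original source: since
   store_constructor assumes its target cannot conflict with EXP
   (safe_from_p has been called), a caller is expected to test that
   first, along these lines.  `exp', `target' and `type' are
   hypothetical.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp))
    target = assign_stack_temp (TYPE_MODE (type),
				int_size_in_bytes (type), 0);
  store_constructor (exp, target);
#endif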
2623
2624/* Store the value of EXP (an expression tree)
2625 into a subfield of TARGET which has mode MODE and occupies
2626 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2627 If MODE is VOIDmode, it means that we are storing into a bit-field.
2628
2629 If VALUE_MODE is VOIDmode, return nothing in particular.
2630 UNSIGNEDP is not used in this case.
2631
2632 Otherwise, return an rtx for the value stored. This rtx
2633 has mode VALUE_MODE if that is convenient to do.
2634 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2635
2636 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2637 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2638
2639static rtx
2640store_field (target, bitsize, bitpos, mode, exp, value_mode,
2641 unsignedp, align, total_size)
2642 rtx target;
2643 int bitsize, bitpos;
2644 enum machine_mode mode;
2645 tree exp;
2646 enum machine_mode value_mode;
2647 int unsignedp;
2648 int align;
2649 int total_size;
2650{
 2651  HOST_WIDE_INT width_mask = 0;
 2652
2653 if (bitsize < HOST_BITS_PER_WIDE_INT)
2654 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2655
2656 /* If we are storing into an unaligned field of an aligned union that is
2657 in a register, we may have the mode of TARGET being an integer mode but
2658 MODE == BLKmode. In that case, get an aligned object whose size and
2659 alignment are the same as TARGET and store TARGET into it (we can avoid
2660 the store if the field being stored is the entire width of TARGET). Then
2661 call ourselves recursively to store the field into a BLKmode version of
2662 that object. Finally, load from the object into TARGET. This is not
2663 very efficient in general, but should only be slightly more expensive
2664 than the otherwise-required unaligned accesses. Perhaps this can be
2665 cleaned up later. */
2666
2667 if (mode == BLKmode
2668 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2669 {
2670 rtx object = assign_stack_temp (GET_MODE (target),
2671 GET_MODE_SIZE (GET_MODE (target)), 0);
2672 rtx blk_object = copy_rtx (object);
2673
2674 PUT_MODE (blk_object, BLKmode);
2675
2676 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2677 emit_move_insn (object, target);
2678
2679 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2680 align, total_size);
2681
2682 emit_move_insn (target, object);
2683
2684 return target;
2685 }
2686
2687 /* If the structure is in a register or if the component
2688 is a bit field, we cannot use addressing to access it.
2689 Use bit-field techniques or SUBREG to store in it. */
2690
2691 if (mode == VOIDmode
2692 || (mode != BLKmode && ! direct_store[(int) mode])
2693 || GET_CODE (target) == REG
2694 || GET_CODE (target) == SUBREG)
2695 {
 2696      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2697 /* Store the value in the bitfield. */
2698 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2699 if (value_mode != VOIDmode)
2700 {
2701 /* The caller wants an rtx for the value. */
2702 /* If possible, avoid refetching from the bitfield itself. */
2703 if (width_mask != 0
2704 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
 2705	    {
 2706	      tree count;
 2707	      enum machine_mode tmode;
 2708
2709 if (unsignedp)
2710 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2711 tmode = GET_MODE (temp);
2712 if (tmode == VOIDmode)
2713 tmode = value_mode;
2714 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2715 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2716 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2717 }
 2718	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
2719 NULL_RTX, value_mode, 0, align,
2720 total_size);
2721 }
2722 return const0_rtx;
2723 }
2724 else
2725 {
2726 rtx addr = XEXP (target, 0);
2727 rtx to_rtx;
2728
2729 /* If a value is wanted, it must be the lhs;
2730 so make the address stable for multiple use. */
2731
2732 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2733 && ! CONSTANT_ADDRESS_P (addr)
2734 /* A frame-pointer reference is already stable. */
2735 && ! (GET_CODE (addr) == PLUS
2736 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2737 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2738 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2739 addr = copy_to_reg (addr);
2740
2741 /* Now build a reference to just the desired component. */
2742
2743 to_rtx = change_address (target, mode,
2744 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2745 MEM_IN_STRUCT_P (to_rtx) = 1;
2746
2747 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2748 }
2749}
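
/* Illustrative sketch, not part of the original source: storing one
   element into a structure in the style of the calls made from
   store_constructor above.  `target', `field', `val' and `type' are
   hypothetical; BITSIZE and BITPOS describe the field in bits.  */
#if 0
  store_field (target, bitsize, bitpos, DECL_MODE (field), val,
	       VOIDmode, 0,		/* no rtx for the value wanted */
	       TYPE_ALIGN (type) / BITS_PER_UNIT,
	       int_size_in_bytes (type));
#endif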
2750\f
2751/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2752 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2753 ARRAY_REFs at constant positions and find the ultimate containing object,
2754 which we return.
2755
2756 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2757 bit position, and *PUNSIGNEDP to the signedness of the field.
2758 If the position of the field is variable, we store a tree
2759 giving the variable offset (in units) in *POFFSET.
2760 This offset is in addition to the bit position.
2761 If the position is not variable, we store 0 in *POFFSET.
2762
2763 If any of the extraction expressions is volatile,
2764 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2765
2766 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2767 is a mode that can be used to access the field. In that case, *PBITSIZE
2768 is redundant.
2769
2770 If the field describes a variable-sized object, *PMODE is set to
2771 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2772 this case, but the address of the object can be found. */
2773
2774tree
 2775get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2776 tree exp;
2777 int *pbitsize;
2778 int *pbitpos;
 2779     tree *poffset;
2780 enum machine_mode *pmode;
2781 int *punsignedp;
2782 int *pvolatilep;
2783{
2784 tree size_tree = 0;
2785 enum machine_mode mode = VOIDmode;
 2786  tree offset = 0;
2787
2788 if (TREE_CODE (exp) == COMPONENT_REF)
2789 {
2790 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2791 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2792 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2793 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2794 }
2795 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2796 {
2797 size_tree = TREE_OPERAND (exp, 1);
2798 *punsignedp = TREE_UNSIGNED (exp);
2799 }
2800 else
2801 {
2802 mode = TYPE_MODE (TREE_TYPE (exp));
2803 *pbitsize = GET_MODE_BITSIZE (mode);
2804 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2805 }
2806
2807 if (size_tree)
2808 {
2809 if (TREE_CODE (size_tree) != INTEGER_CST)
2810 mode = BLKmode, *pbitsize = -1;
2811 else
2812 *pbitsize = TREE_INT_CST_LOW (size_tree);
2813 }
2814
2815 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2816 and find the ultimate containing object. */
2817
2818 *pbitpos = 0;
2819
2820 while (1)
2821 {
2822 if (TREE_CODE (exp) == INDIRECT_REF && flag_volatile)
2823 *pvolatilep = 1;
2824
 2825      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 2826 {
2827 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2828 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2829 : TREE_OPERAND (exp, 2));
 2830
2831 if (TREE_CODE (pos) == PLUS_EXPR)
2832 {
2833 tree constant, var;
2834 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2835 {
2836 constant = TREE_OPERAND (pos, 0);
2837 var = TREE_OPERAND (pos, 1);
2838 }
2839 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2840 {
2841 constant = TREE_OPERAND (pos, 1);
2842 var = TREE_OPERAND (pos, 0);
2843 }
2844 else
2845 abort ();
2846 *pbitpos += TREE_INT_CST_LOW (constant);
2847 if (offset)
2848 offset = size_binop (PLUS_EXPR, offset,
2849 size_binop (FLOOR_DIV_EXPR, var,
2850 size_int (BITS_PER_UNIT)));
2851 else
2852 offset = size_binop (FLOOR_DIV_EXPR, var,
2853 size_int (BITS_PER_UNIT));
2854 }
2855 else if (TREE_CODE (pos) == INTEGER_CST)
2856 *pbitpos += TREE_INT_CST_LOW (pos);
2857 else
2858 {
2859 /* Assume here that the offset is a multiple of a unit.
2860 If not, there should be an explicitly added constant. */
2861 if (offset)
2862 offset = size_binop (PLUS_EXPR, offset,
2863 size_binop (FLOOR_DIV_EXPR, pos,
2864 size_int (BITS_PER_UNIT)));
2865 else
2866 offset = size_binop (FLOOR_DIV_EXPR, pos,
2867 size_int (BITS_PER_UNIT));
2868 }
 2869	}
 2870
2871 else if (TREE_CODE (exp) == ARRAY_REF
2872 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2874 {
2875 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2876 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2877 }
2878 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2879 && ! ((TREE_CODE (exp) == NOP_EXPR
2880 || TREE_CODE (exp) == CONVERT_EXPR)
2881 && (TYPE_MODE (TREE_TYPE (exp))
2882 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2883 break;
2884
2885 /* If any reference in the chain is volatile, the effect is volatile. */
2886 if (TREE_THIS_VOLATILE (exp))
2887 *pvolatilep = 1;
2888 exp = TREE_OPERAND (exp, 0);
2889 }
2890
2891 /* If this was a bit-field, see if there is a mode that allows direct
2892 access in case EXP is in memory. */
2893 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2894 {
2895 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2896 if (mode == BLKmode)
2897 mode = VOIDmode;
2898 }
2899
2900 *pmode = mode;
2901 *poffset = offset;
2902#if 0
2903 /* We aren't finished fixing the callers to really handle nonzero offset. */
2904 if (offset != 0)
2905 abort ();
2906#endif
2907
2908 return exp;
2909}
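
/* Illustrative sketch, not part of the original source: the calling
   sequence used by expand_assignment above.  `exp' is a hypothetical
   COMPONENT_REF or ARRAY_REF tree.  */
#if 0
  {
    enum machine_mode mode1;
    int bitsize, bitpos, unsignedp;
    int volatilep = 0;
    tree offset;
    tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep);
    /* TEM is the containing object; its address plus OFFSET (if any)
       plus BITPOS bits locates the component itself.  */
  }
#endif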
2910\f
2911/* Given an rtx VALUE that may contain additions and multiplications,
2912 return an equivalent value that just refers to a register or memory.
2913 This is done by generating instructions to perform the arithmetic
2914 and returning a pseudo-register containing the value.
2915
2916 The returned value may be a REG, SUBREG, MEM or constant. */
2917
2918rtx
2919force_operand (value, target)
2920 rtx value, target;
2921{
2922 register optab binoptab = 0;
2923 /* Use a temporary to force order of execution of calls to
2924 `force_operand'. */
2925 rtx tmp;
2926 register rtx op2;
2927 /* Use subtarget as the target for operand 0 of a binary operation. */
2928 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2929
2930 if (GET_CODE (value) == PLUS)
2931 binoptab = add_optab;
2932 else if (GET_CODE (value) == MINUS)
2933 binoptab = sub_optab;
2934 else if (GET_CODE (value) == MULT)
2935 {
2936 op2 = XEXP (value, 1);
2937 if (!CONSTANT_P (op2)
2938 && !(GET_CODE (op2) == REG && op2 != subtarget))
2939 subtarget = 0;
2940 tmp = force_operand (XEXP (value, 0), subtarget);
2941 return expand_mult (GET_MODE (value), tmp,
 2942			  force_operand (op2, NULL_RTX),
2943 target, 0);
2944 }
2945
2946 if (binoptab)
2947 {
2948 op2 = XEXP (value, 1);
2949 if (!CONSTANT_P (op2)
2950 && !(GET_CODE (op2) == REG && op2 != subtarget))
2951 subtarget = 0;
2952 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2953 {
2954 binoptab = add_optab;
2955 op2 = negate_rtx (GET_MODE (value), op2);
2956 }
2957
2958 /* Check for an addition with OP2 a constant integer and our first
2959 operand a PLUS of a virtual register and something else. In that
2960 case, we want to emit the sum of the virtual register and the
2961 constant first and then add the other value. This allows virtual
2962 register instantiation to simply modify the constant rather than
2963 creating another one around this addition. */
2964 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2965 && GET_CODE (XEXP (value, 0)) == PLUS
2966 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2967 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2968 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2969 {
2970 rtx temp = expand_binop (GET_MODE (value), binoptab,
2971 XEXP (XEXP (value, 0), 0), op2,
2972 subtarget, 0, OPTAB_LIB_WIDEN);
2973 return expand_binop (GET_MODE (value), binoptab, temp,
2974 force_operand (XEXP (XEXP (value, 0), 1), 0),
2975 target, 0, OPTAB_LIB_WIDEN);
2976 }
2977
2978 tmp = force_operand (XEXP (value, 0), subtarget);
2979 return expand_binop (GET_MODE (value), binoptab, tmp,
 2980			   force_operand (op2, NULL_RTX),
2981 target, 0, OPTAB_LIB_WIDEN);
 2982  /* We give UNSIGNEDP = 0 to expand_binop
2983 because the only operations we are expanding here are signed ones. */
2984 }
2985 return value;
2986}
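
/* Illustrative sketch, not part of the original source: collapsing an
   address computation such as (plus (reg) (const_int 4)) into a
   single operand.  `value' is a hypothetical rtx of that shape.  */
#if 0
  rtx op = force_operand (value, NULL_RTX);
  /* OP is now a REG, SUBREG, MEM or constant, usable in a move.  */
#endif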
2987\f
2988/* Subroutine of expand_expr:
2989 save the non-copied parts (LIST) of an expr (LHS), and return a list
2990 which can restore these values to their previous values,
2991 should something modify their storage. */
2992
2993static tree
2994save_noncopied_parts (lhs, list)
2995 tree lhs;
2996 tree list;
2997{
2998 tree tail;
2999 tree parts = 0;
3000
3001 for (tail = list; tail; tail = TREE_CHAIN (tail))
3002 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3003 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3004 else
3005 {
3006 tree part = TREE_VALUE (tail);
3007 tree part_type = TREE_TYPE (part);
 3008	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3009 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3010 int_size_in_bytes (part_type), 0);
3011 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
 3012	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
 3013	parts = tree_cons (to_be_saved,
3014 build (RTL_EXPR, part_type, NULL_TREE,
3015 (tree) target),
3016 parts);
3017 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3018 }
3019 return parts;
3020}
3021
3022/* Subroutine of expand_expr:
3023 record the non-copied parts (LIST) of an expr (LHS), and return a list
3024 which specifies the initial values of these parts. */
3025
3026static tree
3027init_noncopied_parts (lhs, list)
3028 tree lhs;
3029 tree list;
3030{
3031 tree tail;
3032 tree parts = 0;
3033
3034 for (tail = list; tail; tail = TREE_CHAIN (tail))
3035 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3036 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3037 else
3038 {
3039 tree part = TREE_VALUE (tail);
3040 tree part_type = TREE_TYPE (part);
 3041	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3042 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3043 }
3044 return parts;
3045}
3046
3047/* Subroutine of expand_expr: return nonzero iff there is no way that
3048 EXP can reference X, which is being modified. */
3049
3050static int
3051safe_from_p (x, exp)
3052 rtx x;
3053 tree exp;
3054{
3055 rtx exp_rtl = 0;
3056 int i, nops;
3057
3058 if (x == 0)
3059 return 1;
3060
3061 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3062 find the underlying pseudo. */
3063 if (GET_CODE (x) == SUBREG)
3064 {
3065 x = SUBREG_REG (x);
3066 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3067 return 0;
3068 }
3069
3070 /* If X is a location in the outgoing argument area, it is always safe. */
3071 if (GET_CODE (x) == MEM
3072 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3073 || (GET_CODE (XEXP (x, 0)) == PLUS
3074 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3075 return 1;
3076
3077 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3078 {
3079 case 'd':
3080 exp_rtl = DECL_RTL (exp);
3081 break;
3082
3083 case 'c':
3084 return 1;
3085
3086 case 'x':
3087 if (TREE_CODE (exp) == TREE_LIST)
3088 return ((TREE_VALUE (exp) == 0
3089 || safe_from_p (x, TREE_VALUE (exp)))
3090 && (TREE_CHAIN (exp) == 0
3091 || safe_from_p (x, TREE_CHAIN (exp))));
3092 else
3093 return 0;
3094
3095 case '1':
3096 return safe_from_p (x, TREE_OPERAND (exp, 0));
3097
3098 case '2':
3099 case '<':
3100 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3101 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3102
3103 case 'e':
3104 case 'r':
3105 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3106 the expression. If it is set, we conflict iff we are that rtx or
3107 both are in memory. Otherwise, we check all operands of the
3108 expression recursively. */
3109
3110 switch (TREE_CODE (exp))
3111 {
3112 case ADDR_EXPR:
3113 return staticp (TREE_OPERAND (exp, 0));
3114
3115 case INDIRECT_REF:
3116 if (GET_CODE (x) == MEM)
3117 return 0;
3118 break;
3119
3120 case CALL_EXPR:
3121 exp_rtl = CALL_EXPR_RTL (exp);
3122 if (exp_rtl == 0)
3123 {
3124 /* Assume that the call will clobber all hard registers and
3125 all of memory. */
3126 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3127 || GET_CODE (x) == MEM)
3128 return 0;
3129 }
3130
3131 break;
3132
3133 case RTL_EXPR:
3134 exp_rtl = RTL_EXPR_RTL (exp);
3135 if (exp_rtl == 0)
3136 /* We don't know what this can modify. */
3137 return 0;
3138
3139 break;
3140
3141 case WITH_CLEANUP_EXPR:
3142 exp_rtl = RTL_EXPR_RTL (exp);
3143 break;
3144
3145 case SAVE_EXPR:
3146 exp_rtl = SAVE_EXPR_RTL (exp);
3147 break;
3148
3149 case BIND_EXPR:
3150 /* The only operand we look at is operand 1. The rest aren't
3151 part of the expression. */
3152 return safe_from_p (x, TREE_OPERAND (exp, 1));
3153
3154 case METHOD_CALL_EXPR:
3155 /* This takes a rtx argument, but shouldn't appear here. */
3156 abort ();
3157 }
3158
3159 /* If we have an rtx, we do not need to scan our operands. */
3160 if (exp_rtl)
3161 break;
3162
3163 nops = tree_code_length[(int) TREE_CODE (exp)];
3164 for (i = 0; i < nops; i++)
3165 if (TREE_OPERAND (exp, i) != 0
3166 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3167 return 0;
3168 }
3169
3170 /* If we have an rtl, find any enclosed object. Then see if we conflict
3171 with it. */
3172 if (exp_rtl)
3173 {
3174 if (GET_CODE (exp_rtl) == SUBREG)
3175 {
3176 exp_rtl = SUBREG_REG (exp_rtl);
3177 if (GET_CODE (exp_rtl) == REG
3178 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3179 return 0;
3180 }
3181
3182 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3183 are memory and EXP is not readonly. */
3184 return ! (rtx_equal_p (x, exp_rtl)
3185 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3186 && ! TREE_READONLY (exp)));
3187 }
3188
3189 /* If we reach here, it is safe. */
3190 return 1;
3191}
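
/* Illustrative sketch, not part of the original source: guarding an
   in-place expansion with safe_from_p, as the store_constructor
   machinery assumes its callers do.  `target' and `exp' are
   hypothetical.  */
#if 0
  if (! safe_from_p (target, exp))
    /* EXP might reference TARGET; compute into a fresh pseudo instead.  */
    target = gen_reg_rtx (GET_MODE (target));
#endif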
3192
3193/* Subroutine of expand_expr: return nonzero iff EXP is an
3194 expression whose type is statically determinable. */
3195
3196static int
3197fixed_type_p (exp)
3198 tree exp;
3199{
3200 if (TREE_CODE (exp) == PARM_DECL
3201 || TREE_CODE (exp) == VAR_DECL
3202 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3203 || TREE_CODE (exp) == COMPONENT_REF
3204 || TREE_CODE (exp) == ARRAY_REF)
3205 return 1;
3206 return 0;
3207}
3208\f
3209/* expand_expr: generate code for computing expression EXP.
3210 An rtx for the computed value is returned. The value is never null.
3211 In the case of a void EXP, const0_rtx is returned.
3212
3213 The value may be stored in TARGET if TARGET is nonzero.
3214 TARGET is just a suggestion; callers must assume that
3215 the rtx returned may not be the same as TARGET.
3216
3217 If TARGET is CONST0_RTX, it means that the value will be ignored.
3218
3219 If TMODE is not VOIDmode, it suggests generating the
3220 result in mode TMODE. But this is done only when convenient.
 3221   Otherwise, TMODE is ignored and the value is generated in its natural mode.
3222 TMODE is just a suggestion; callers must assume that
3223 the rtx returned may not have mode TMODE.
3224
3225 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3226 with a constant address even if that address is not normally legitimate.
3227 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3228
3229 If MODIFIER is EXPAND_SUM then when EXP is an addition
3230 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3231 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3232 products as above, or REG or MEM, or constant.
3233 Ordinarily in such cases we would output mul or add instructions
3234 and then return a pseudo reg containing the sum.
3235
3236 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3237 it also marks a label as absolutely required (it can't be dead).
 3238   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
 3239   This is used for outputting expressions used in initializers. */
3240
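
/* Illustrative sketch, not part of the original source: the effect of
   the modifiers described above.  `exp' is a hypothetical addition.  */
#if 0
  /* Ordinary expansion: emit insns, get a pseudo holding the sum.  */
  op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  /* Address context: may return (plus (reg ...) (const_int ...))
     without emitting an add insn.  */
  op1 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
#endif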
3241rtx
3242expand_expr (exp, target, tmode, modifier)
3243 register tree exp;
3244 rtx target;
3245 enum machine_mode tmode;
3246 enum expand_modifier modifier;
3247{
3248 register rtx op0, op1, temp;
3249 tree type = TREE_TYPE (exp);
3250 int unsignedp = TREE_UNSIGNED (type);
3251 register enum machine_mode mode = TYPE_MODE (type);
3252 register enum tree_code code = TREE_CODE (exp);
3253 optab this_optab;
3254 /* Use subtarget as the target for operand 0 of a binary operation. */
3255 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3256 rtx original_target = target;
3257 int ignore = target == const0_rtx;
3258 tree context;
3259
3260 /* Don't use hard regs as subtargets, because the combiner
3261 can only handle pseudo regs. */
3262 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3263 subtarget = 0;
3264 /* Avoid subtargets inside loops,
3265 since they hide some invariant expressions. */
3266 if (preserve_subexpressions_p ())
3267 subtarget = 0;
3268
3269 if (ignore) target = 0, original_target = 0;
3270
3271 /* If will do cse, generate all results into pseudo registers
3272 since 1) that allows cse to find more things
3273 and 2) otherwise cse could produce an insn the machine
3274 cannot support. */
3275
3276 if (! cse_not_expected && mode != BLKmode && target
3277 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3278 target = subtarget;
3279
3280 /* Ensure we reference a volatile object even if value is ignored. */
3281 if (ignore && TREE_THIS_VOLATILE (exp)
3282 && mode != VOIDmode && mode != BLKmode)
3283 {
3284 target = gen_reg_rtx (mode);
3285 temp = expand_expr (exp, target, VOIDmode, modifier);
3286 if (temp != target)
3287 emit_move_insn (target, temp);
3288 return target;
3289 }
3290
3291 switch (code)
3292 {
3293 case LABEL_DECL:
3294 {
3295 tree function = decl_function_context (exp);
3296 /* Handle using a label in a containing function. */
3297 if (function != current_function_decl && function != 0)
3298 {
3299 struct function *p = find_function_data (function);
3300 /* Allocate in the memory associated with the function
3301 that the label is in. */
3302 push_obstacks (p->function_obstack,
3303 p->function_maybepermanent_obstack);
3304
3305 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3306 label_rtx (exp), p->forced_labels);
3307 pop_obstacks ();
3308 }
3309 else if (modifier == EXPAND_INITIALIZER)
3310 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3311 label_rtx (exp), forced_labels);
 3312	temp = gen_rtx (MEM, FUNCTION_MODE,
 3313			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3314 if (function != current_function_decl && function != 0)
3315 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3316 return temp;
 3317      }
3318
3319 case PARM_DECL:
3320 if (DECL_RTL (exp) == 0)
3321 {
3322 error_with_decl (exp, "prior parameter's size depends on `%s'");
 3323	  return CONST0_RTX (mode);
3324 }
3325
3326 case FUNCTION_DECL:
3327 case VAR_DECL:
3328 case RESULT_DECL:
3329 if (DECL_RTL (exp) == 0)
3330 abort ();
3331 /* Ensure variable marked as used
3332 even if it doesn't go through a parser. */
3333 TREE_USED (exp) = 1;
3334 /* Handle variables inherited from containing functions. */
3335 context = decl_function_context (exp);
3336
3337 /* We treat inline_function_decl as an alias for the current function
3338 because that is the inline function whose vars, types, etc.
3339 are being merged into the current function.
3340 See expand_inline_function. */
3341 if (context != 0 && context != current_function_decl
3342 && context != inline_function_decl
3343 /* If var is static, we don't need a static chain to access it. */
3344 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3345 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3346 {
3347 rtx addr;
3348
3349 /* Mark as non-local and addressable. */
 3350	  DECL_NONLOCAL (exp) = 1;
3351 mark_addressable (exp);
3352 if (GET_CODE (DECL_RTL (exp)) != MEM)
3353 abort ();
3354 addr = XEXP (DECL_RTL (exp), 0);
3355 if (GET_CODE (addr) == MEM)
3356 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3357 else
3358 addr = fix_lexical_addr (addr, exp);
3359 return change_address (DECL_RTL (exp), mode, addr);
3360 }
 3361
3362 /* This is the case of an array whose size is to be determined
3363 from its initializer, while the initializer is still being parsed.
3364 See expand_decl. */
3365 if (GET_CODE (DECL_RTL (exp)) == MEM
3366 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3367 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3368 XEXP (DECL_RTL (exp), 0));
3369 if (GET_CODE (DECL_RTL (exp)) == MEM
3370 && modifier != EXPAND_CONST_ADDRESS
3371 && modifier != EXPAND_SUM
3372 && modifier != EXPAND_INITIALIZER)
3373 {
3374 /* DECL_RTL probably contains a constant address.
3375 On RISC machines where a constant address isn't valid,
3376 make some insns to get that address into a register. */
3377 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3378 || (flag_force_addr
3379 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3380 return change_address (DECL_RTL (exp), VOIDmode,
3381 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3382 }
3383
3384 /* If the mode of DECL_RTL does not match that of the decl, it
3385 must be a promoted value. We return a SUBREG of the wanted mode,
3386 but mark it so that we know that it was already extended. */
3387
3388 if (GET_CODE (DECL_RTL (exp)) == REG
3389 && GET_MODE (DECL_RTL (exp)) != mode)
3390 {
3391 enum machine_mode decl_mode = DECL_MODE (exp);
3392
3393 /* Get the signedness used for this variable. Ensure we get the
3394 same mode we got when the variable was declared. */
3395
3396 PROMOTE_MODE (decl_mode, unsignedp, type);
3397
3398 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3399 abort ();
3400
3401 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3402 SUBREG_PROMOTED_VAR_P (temp) = 1;
3403 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3404 return temp;
3405 }
3406
3407 return DECL_RTL (exp);
3408
3409 case INTEGER_CST:
3410 return immed_double_const (TREE_INT_CST_LOW (exp),
3411 TREE_INT_CST_HIGH (exp),
3412 mode);
3413
3414 case CONST_DECL:
3415 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3416
3417 case REAL_CST:
3418 /* If optimized, generate immediate CONST_DOUBLE
3419 which will be turned into memory by reload if necessary.
3420
3421 We used to force a register so that loop.c could see it. But
3422 this does not allow gen_* patterns to perform optimizations with
3423 the constants. It also produces two insns in cases like "x = 1.0;".
3424 On most machines, floating-point constants are not permitted in
3425 many insns, so we'd end up copying it to a register in any case.
3426
3427 Now, we do the copying in expand_binop, if appropriate. */
3428 return immed_real_const (exp);
3429
3430 case COMPLEX_CST:
3431 case STRING_CST:
3432 if (! TREE_CST_RTL (exp))
3433 output_constant_def (exp);
3434
3435 /* TREE_CST_RTL probably contains a constant address.
3436 On RISC machines where a constant address isn't valid,
3437 make some insns to get that address into a register. */
3438 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3439 && modifier != EXPAND_CONST_ADDRESS
3440 && modifier != EXPAND_INITIALIZER
3441 && modifier != EXPAND_SUM
3442 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3443 return change_address (TREE_CST_RTL (exp), VOIDmode,
3444 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3445 return TREE_CST_RTL (exp);
3446
3447 case SAVE_EXPR:
3448 context = decl_function_context (exp);
3449 /* We treat inline_function_decl as an alias for the current function
3450 because that is the inline function whose vars, types, etc.
3451 are being merged into the current function.
3452 See expand_inline_function. */
3453 if (context == current_function_decl || context == inline_function_decl)
3454 context = 0;
3455
3456 /* If this is non-local, handle it. */
3457 if (context)
3458 {
3459 temp = SAVE_EXPR_RTL (exp);
3460 if (temp && GET_CODE (temp) == REG)
3461 {
3462 put_var_into_stack (exp);
3463 temp = SAVE_EXPR_RTL (exp);
3464 }
3465 if (temp == 0 || GET_CODE (temp) != MEM)
3466 abort ();
3467 return change_address (temp, mode,
3468 fix_lexical_addr (XEXP (temp, 0), exp));
3469 }
3470 if (SAVE_EXPR_RTL (exp) == 0)
3471 {
3472 if (mode == BLKmode)
3473 temp
3474 = assign_stack_temp (mode,
3475 int_size_in_bytes (TREE_TYPE (exp)), 0);
3476 else
3477 {
3478 enum machine_mode var_mode = mode;
3479
3480 if (TREE_CODE (type) == INTEGER_TYPE
3481 || TREE_CODE (type) == ENUMERAL_TYPE
3482 || TREE_CODE (type) == BOOLEAN_TYPE
3483 || TREE_CODE (type) == CHAR_TYPE
3484 || TREE_CODE (type) == REAL_TYPE
3485 || TREE_CODE (type) == POINTER_TYPE
3486 || TREE_CODE (type) == OFFSET_TYPE)
3487 {
3488 PROMOTE_MODE (var_mode, unsignedp, type);
3489 }
3490
3491 temp = gen_reg_rtx (var_mode);
3492 }
3493
3494 SAVE_EXPR_RTL (exp) = temp;
3495 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3496 if (!optimize && GET_CODE (temp) == REG)
3497 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3498 save_expr_regs);
3499 }
3500
3501 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3502 must be a promoted value. We return a SUBREG of the wanted mode,
3503 but mark it so that we know that it was already extended. Note
3504 that `unsignedp' was modified above in this case. */
3505
3506 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3507 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3508 {
3509 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3510 SUBREG_PROMOTED_VAR_P (temp) = 1;
3511 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3512 return temp;
3513 }
3514
3515 return SAVE_EXPR_RTL (exp);
3516
3517 case EXIT_EXPR:
3518 /* Exit the current loop if the body-expression is true. */
3519 {
3520 rtx label = gen_label_rtx ();
3521 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3522 expand_exit_loop (NULL_PTR);
3523 emit_label (label);
3524 }
3525 return const0_rtx;
3526
3527 case LOOP_EXPR:
3528 expand_start_loop (1);
3529 expand_expr_stmt (TREE_OPERAND (exp, 0));
3530 expand_end_loop ();
3531
3532 return const0_rtx;
3533
3534 case BIND_EXPR:
3535 {
3536 tree vars = TREE_OPERAND (exp, 0);
3537 int vars_need_expansion = 0;
3538
3539 /* Need to open a binding contour here because
3540	 if there are any cleanups they must be contained here.  */
3541 expand_start_bindings (0);
3542
3543 /* Mark the corresponding BLOCK for output in its proper place. */
3544 if (TREE_OPERAND (exp, 2) != 0
3545 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3546 insert_block (TREE_OPERAND (exp, 2));
3547
3548 /* If VARS have not yet been expanded, expand them now. */
3549 while (vars)
3550 {
3551 if (DECL_RTL (vars) == 0)
3552 {
3553 vars_need_expansion = 1;
3554 expand_decl (vars);
3555 }
3556 expand_decl_init (vars);
3557 vars = TREE_CHAIN (vars);
3558 }
3559
3560 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3561
3562 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3563
3564 return temp;
3565 }
3566
3567 case RTL_EXPR:
3568 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3569 abort ();
3570 emit_insns (RTL_EXPR_SEQUENCE (exp));
3571 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3572 return RTL_EXPR_RTL (exp);
3573
3574 case CONSTRUCTOR:
3575 /* All elts simple constants => refer to a constant in memory. But
3576 if this is a non-BLKmode mode, let it store a field at a time
3577 since that should make a CONST_INT or CONST_DOUBLE when we
3578 fold. */
3579 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3580 {
3581 rtx constructor = output_constant_def (exp);
3582 if (modifier != EXPAND_CONST_ADDRESS
3583 && modifier != EXPAND_INITIALIZER
3584 && modifier != EXPAND_SUM
3585 && !memory_address_p (GET_MODE (constructor),
3586 XEXP (constructor, 0)))
3587 constructor = change_address (constructor, VOIDmode,
3588 XEXP (constructor, 0));
3589 return constructor;
3590 }
3591
3592 if (ignore)
3593 {
3594 tree elt;
3595 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3596 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3597 return const0_rtx;
3598 }
3599 else
3600 {
3601 if (target == 0 || ! safe_from_p (target, exp))
3602 {
3603 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3604 target = gen_reg_rtx (mode);
3605 else
3606 {
3607 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3608 if (target)
3609 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3610 target = safe_target;
3611 }
3612 }
3613 store_constructor (exp, target);
3614 return target;
3615 }
3616
3617 case INDIRECT_REF:
3618 {
3619 tree exp1 = TREE_OPERAND (exp, 0);
3620 tree exp2;
3621
3622 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3623 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3624 This code has the same general effect as simply doing
3625 expand_expr on the save expr, except that the expression PTR
3626 is computed for use as a memory address. This means different
3627 code, suitable for indexing, may be generated. */
3628 if (TREE_CODE (exp1) == SAVE_EXPR
3629 && SAVE_EXPR_RTL (exp1) == 0
3630 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3631 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3632 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3633 {
3634 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3635 VOIDmode, EXPAND_SUM);
3636 op0 = memory_address (mode, temp);
3637 op0 = copy_all_regs (op0);
3638 SAVE_EXPR_RTL (exp1) = op0;
3639 }
3640 else
3641 {
3642	  op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3643 op0 = memory_address (mode, op0);
3644 }
3645
3646 temp = gen_rtx (MEM, mode, op0);
3647 /* If address was computed by addition,
3648 mark this as an element of an aggregate. */
3649 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3650 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3651 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3652 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3653 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3654 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3655 || (TREE_CODE (exp1) == ADDR_EXPR
3656 && (exp2 = TREE_OPERAND (exp1, 0))
3657 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3658 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3659 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3660 MEM_IN_STRUCT_P (temp) = 1;
3661 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3662#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3663 a location is accessed through a pointer to const does not mean
3664 that the value there can never change. */
3665	 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3666#endif
3667 return temp;
3668 }
3669
3670 case ARRAY_REF:
3671 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3672 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3673 {
3674 /* Nonconstant array index or nonconstant element size.
3675 Generate the tree for *(&array+index) and expand that,
3676 except do it in a language-independent way
3677 and don't complain about non-lvalue arrays.
3678 `mark_addressable' should already have been called
3679 for any array for which this case will be reached. */
3680
3681 /* Don't forget the const or volatile flag from the array element. */
3682 tree variant_type = build_type_variant (type,
3683 TREE_READONLY (exp),
3684 TREE_THIS_VOLATILE (exp));
3685 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3686 TREE_OPERAND (exp, 0));
3687 tree index = TREE_OPERAND (exp, 1);
3688 tree elt;
3689
3690 /* Convert the integer argument to a type the same size as a pointer
3691 so the multiply won't overflow spuriously. */
3692 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3693 index = convert (type_for_size (POINTER_SIZE, 0), index);
3694
3695 /* Don't think the address has side effects
3696 just because the array does.
3697 (In some cases the address might have side effects,
3698 and we fail to record that fact here. However, it should not
3699 matter, since expand_expr should not care.) */
3700 TREE_SIDE_EFFECTS (array_adr) = 0;
3701
3702 elt = build1 (INDIRECT_REF, type,
3703 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3704 array_adr,
3705 fold (build (MULT_EXPR,
3706 TYPE_POINTER_TO (variant_type),
3707 index, size_in_bytes (type))))));
3708
3709 /* Volatility, etc., of new expression is same as old expression. */
3710 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3711 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3712 TREE_READONLY (elt) = TREE_READONLY (exp);
3713
3714 return expand_expr (elt, target, tmode, modifier);
3715 }
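/* Illustrative sketch, not part of this file: the *(&array + index)
   rewrite above, written out in C for an int array, assuming `long'
   has pointer precision. Widening the index first keeps the multiply
   by the element size from overflowing spuriously.  */
int
array_ref_expanded (int *array, int index)
{
  long i = (long) index;        /* convert to POINTER_SIZE */
  return *(int *) ((char *) array + i * (long) sizeof (int));
}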
3716
3717 /* Fold an expression like: "foo"[2].
3718 This is not done in fold so it won't happen inside &. */
3719 {
3720 int i;
3721 tree arg0 = TREE_OPERAND (exp, 0);
3722 tree arg1 = TREE_OPERAND (exp, 1);
3723
3724 if (TREE_CODE (arg0) == STRING_CST
3725 && TREE_CODE (arg1) == INTEGER_CST
3726 && !TREE_INT_CST_HIGH (arg1)
3727 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3728 {
3729 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3730 {
3731 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3732 TREE_TYPE (exp) = integer_type_node;
3733 return expand_expr (exp, target, tmode, modifier);
3734 }
3735 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3736 {
3737 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3738 TREE_TYPE (exp) = integer_type_node;
3739 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3740 }
3741 }
3742 }
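/* Illustrative sketch, not part of this file: the fold above reduces a
   constant index into a string literal to a character constant, so the
   following compiles to a load of the constant 'o', with no memory
   reference to the string.  */
int
folded_string_index ()
{
  return "foo"[2];
}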
3743
3744 /* If this is a constant index into a constant array,
3745 just get the value from the array. Handle both the cases when
3746 we have an explicit constructor and when our operand is a variable
3747 that was declared const. */
3748
3749 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3750 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3751 {
3752 tree index = fold (TREE_OPERAND (exp, 1));
3753 if (TREE_CODE (index) == INTEGER_CST
3754 && TREE_INT_CST_HIGH (index) == 0)
3755 {
3756 int i = TREE_INT_CST_LOW (index);
3757 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3758
3759 while (elem && i--)
3760 elem = TREE_CHAIN (elem);
3761 if (elem)
3762 return expand_expr (fold (TREE_VALUE (elem)), target,
3763 tmode, modifier);
3764 }
3765 }
3766
3767 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3768 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3769 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3770 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3771 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3772 && optimize >= 1
3773 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3774 != ERROR_MARK))
3775 {
3776 tree index = fold (TREE_OPERAND (exp, 1));
3777 if (TREE_CODE (index) == INTEGER_CST
3778 && TREE_INT_CST_HIGH (index) == 0)
3779 {
3780 int i = TREE_INT_CST_LOW (index);
3781	    tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3782
3783 if (TREE_CODE (init) == CONSTRUCTOR)
3784 {
3785 tree elem = CONSTRUCTOR_ELTS (init);
3786
3787 while (elem && i--)
3788 elem = TREE_CHAIN (elem);
3789 if (elem)
3790 return expand_expr (fold (TREE_VALUE (elem)), target,
3791 tmode, modifier);
3792 }
3793 else if (TREE_CODE (init) == STRING_CST
3794 && i < TREE_STRING_LENGTH (init))
3795 {
3796	    temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3797 return convert_to_mode (mode, temp, 0);
3798 }
3799 }
3800 }
3801 /* Treat array-ref with constant index as a component-ref. */
3802
3803 case COMPONENT_REF:
3804 case BIT_FIELD_REF:
3805 /* If the operand is a CONSTRUCTOR, we can just extract the
3806 appropriate field if it is present. */
3807 if (code != ARRAY_REF
3808 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3809 {
3810 tree elt;
3811
3812 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3813 elt = TREE_CHAIN (elt))
3814 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3815 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3816 }
3817
3818 {
3819 enum machine_mode mode1;
3820 int bitsize;
3821 int bitpos;
3822	 tree offset;
3823	 int volatilep = 0;
3824	 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3825 &mode1, &unsignedp, &volatilep);
3826
3827 /* In some cases, we will be offsetting OP0's address by a constant.
3828 So get it as a sum, if possible. If we will be using it
3829 directly in an insn, we validate it. */
3830	 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3831
3832 /* If this is a constant, put it into a register if it is a
3833	 legitimate constant, and into memory if it isn't.  */
3834 if (CONSTANT_P (op0))
3835 {
3836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3837 if (LEGITIMATE_CONSTANT_P (op0))
3838 op0 = force_reg (mode, op0);
3839 else
3840 op0 = validize_mem (force_const_mem (mode, op0));
3841 }
3842
3843 if (offset != 0)
3844 {
3845	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3846
3847 if (GET_CODE (op0) != MEM)
3848 abort ();
3849 op0 = change_address (op0, VOIDmode,
3850 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3851 force_reg (Pmode, offset_rtx)));
3852 }
3853
3854 /* Don't forget about volatility even if this is a bitfield. */
3855 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3856 {
3857 op0 = copy_rtx (op0);
3858 MEM_VOLATILE_P (op0) = 1;
3859 }
3860
3861 if (mode1 == VOIDmode
3862 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3863 && modifier != EXPAND_CONST_ADDRESS
3864 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3865 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3866 {
3867 /* In cases where an aligned union has an unaligned object
3868 as a field, we might be extracting a BLKmode value from
3869 an integer-mode (e.g., SImode) object. Handle this case
3870 by doing the extract into an object as wide as the field
3871 (which we know to be the width of a basic mode), then
3872 storing into memory, and changing the mode to BLKmode. */
3873 enum machine_mode ext_mode = mode;
3874
3875 if (ext_mode == BLKmode)
3876 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3877
3878 if (ext_mode == BLKmode)
3879 abort ();
3880
3881 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3882 unsignedp, target, ext_mode, ext_mode,
3883 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3884 int_size_in_bytes (TREE_TYPE (tem)));
3885 if (mode == BLKmode)
3886 {
3887 rtx new = assign_stack_temp (ext_mode,
3888 bitsize / BITS_PER_UNIT, 0);
3889
3890 emit_move_insn (new, op0);
3891 op0 = copy_rtx (new);
3892 PUT_MODE (op0, BLKmode);
3893 }
3894
3895 return op0;
3896 }
3897
3898 /* Get a reference to just this component. */
3899 if (modifier == EXPAND_CONST_ADDRESS
3900 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3901 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3902 (bitpos / BITS_PER_UNIT)));
3903 else
3904 op0 = change_address (op0, mode1,
3905 plus_constant (XEXP (op0, 0),
3906 (bitpos / BITS_PER_UNIT)));
3907 MEM_IN_STRUCT_P (op0) = 1;
3908 MEM_VOLATILE_P (op0) |= volatilep;
3909 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3910 return op0;
3911 if (target == 0)
3912 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3913 convert_move (target, op0, unsignedp);
3914 return target;
3915 }
3916
3917 case OFFSET_REF:
3918 {
3919 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3920 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3921	 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3922 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3923 MEM_IN_STRUCT_P (temp) = 1;
3924 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3925#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3926 a location is accessed through a pointer to const does not mean
3927 that the value there can never change. */
3928 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3929#endif
3930 return temp;
3931 }
3932
3933 /* Intended for a reference to a buffer of a file-object in Pascal.
3934 But it's not certain that a special tree code will really be
3935 necessary for these. INDIRECT_REF might work for them. */
3936 case BUFFER_REF:
3937 abort ();
3938
3939 /* IN_EXPR: Inlined pascal set IN expression.
3940
3941 Algorithm:
3942 rlo = set_low - (set_low%bits_per_word);
3943 the_word = set [ (index - rlo)/bits_per_word ];
3944 bit_index = index % bits_per_word;
3945 bitmask = 1 << bit_index;
3946 return !!(the_word & bitmask); */
3947 case IN_EXPR:
3948 preexpand_calls (exp);
3949 {
3950 tree set = TREE_OPERAND (exp, 0);
3951 tree index = TREE_OPERAND (exp, 1);
3952 tree set_type = TREE_TYPE (set);
3953
3954 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3955 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3956
3957 rtx index_val;
3958 rtx lo_r;
3959 rtx hi_r;
3960 rtx rlow;
3961 rtx diff, quo, rem, addr, bit, result;
3962 rtx setval, setaddr;
3963 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3964
3965 if (target == 0)
3966 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3967
3968 /* If domain is empty, answer is no. */
3969 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3970 return const0_rtx;
3971
3972 index_val = expand_expr (index, 0, VOIDmode, 0);
3973 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3974 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3975 setval = expand_expr (set, 0, VOIDmode, 0);
3976 setaddr = XEXP (setval, 0);
3977
3978 /* Compare index against bounds, if they are constant. */
3979 if (GET_CODE (index_val) == CONST_INT
3980 && GET_CODE (lo_r) == CONST_INT)
3981 {
3982 if (INTVAL (index_val) < INTVAL (lo_r))
3983 return const0_rtx;
3984 }
3985
3986 if (GET_CODE (index_val) == CONST_INT
3987 && GET_CODE (hi_r) == CONST_INT)
3988 {
3989 if (INTVAL (hi_r) < INTVAL (index_val))
3990 return const0_rtx;
3991 }
3992
3993 /* If we get here, we have to generate the code for both cases
3994 (in range and out of range). */
3995
3996 op0 = gen_label_rtx ();
3997 op1 = gen_label_rtx ();
3998
3999 if (! (GET_CODE (index_val) == CONST_INT
4000 && GET_CODE (lo_r) == CONST_INT))
4001 {
4002 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4003 emit_jump_insn (gen_blt (op1));
4004 }
4005
4006 if (! (GET_CODE (index_val) == CONST_INT
4007 && GET_CODE (hi_r) == CONST_INT))
4008 {
4009 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4010 emit_jump_insn (gen_bgt (op1));
4011 }
4012
4013 /* Calculate the element number of bit zero in the first word
4014 of the set. */
4015 if (GET_CODE (lo_r) == CONST_INT)
4016 rlow = gen_rtx (CONST_INT, VOIDmode,
4017	    INTVAL (lo_r) & ~ (BITS_PER_UNIT - 1));
4018 else
4019 rlow = expand_binop (index_mode, and_optab,
4020 lo_r, gen_rtx (CONST_INT, VOIDmode,
4021	    ~ (BITS_PER_UNIT - 1)),
4022 0, 0, OPTAB_LIB_WIDEN);
4023
4024 diff = expand_binop (index_mode, sub_optab,
4025 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4026
4027 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4028 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4029 0, 0);
4030 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4031 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4032 0, 0);
4033 addr = memory_address (byte_mode,
4034 expand_binop (index_mode, add_optab,
4035	    diff, setaddr, NULL_RTX, 0, OPTAB_LIB_WIDEN));
4036 /* Extract the bit we want to examine */
4037 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4038 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4039 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4040 1, OPTAB_LIB_WIDEN);
4041 emit_move_insn (target, result);
4042
4043 /* Output the code to handle the out-of-range case. */
4044 emit_jump (op0);
4045 emit_label (op1);
4046 emit_move_insn (target, const0_rtx);
4047 emit_label (op0);
4048 return target;
4049 }
4050
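/* Illustrative sketch, not part of this file: the IN_EXPR bit test
   generated above, written in C assuming BITS_PER_UNIT == 8. SET
   points at the set's bitmap and LOW is the set's lower bound.  */
int
set_contains (unsigned char *set, int low, int index)
{
  int rlo = low & ~7;                       /* round down to a byte */
  unsigned char the_byte = set[(index - rlo) / 8];
  return (the_byte >> (index % 8)) & 1;     /* extract the bit */
}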
4051 case WITH_CLEANUP_EXPR:
4052 if (RTL_EXPR_RTL (exp) == 0)
4053 {
4054 RTL_EXPR_RTL (exp)
4055 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4056 cleanups_this_call
4057 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4058 /* That's it for this cleanup. */
4059 TREE_OPERAND (exp, 2) = 0;
4060 }
4061 return RTL_EXPR_RTL (exp);
4062
4063 case CALL_EXPR:
4064 /* Check for a built-in function. */
4065 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4066 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4067 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4068 return expand_builtin (exp, target, subtarget, tmode, ignore);
4069 /* If this call was expanded already by preexpand_calls,
4070 just return the result we got. */
4071 if (CALL_EXPR_RTL (exp) != 0)
4072 return CALL_EXPR_RTL (exp);
4073	 return expand_call (exp, target, ignore);
4074
4075 case NON_LVALUE_EXPR:
4076 case NOP_EXPR:
4077 case CONVERT_EXPR:
4078 case REFERENCE_EXPR:
4079 if (TREE_CODE (type) == VOID_TYPE || ignore)
4080 {
4081 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4082 return const0_rtx;
4083 }
4084 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4085 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4086 if (TREE_CODE (type) == UNION_TYPE)
4087 {
4088 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4089 if (target == 0)
4090 {
4091 if (mode == BLKmode)
4092 {
4093 if (TYPE_SIZE (type) == 0
4094 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4095 abort ();
4096 target = assign_stack_temp (BLKmode,
4097 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4098 + BITS_PER_UNIT - 1)
4099 / BITS_PER_UNIT, 0);
4100 }
4101 else
4102 target = gen_reg_rtx (mode);
4103 }
4104 if (GET_CODE (target) == MEM)
4105 /* Store data into beginning of memory target. */
4106 store_expr (TREE_OPERAND (exp, 0),
4107 change_address (target, TYPE_MODE (valtype), 0), 0);
4108
4109 else if (GET_CODE (target) == REG)
4110 /* Store this field into a union of the proper type. */
4111 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4112 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4113 VOIDmode, 0, 1,
4114 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4115 else
4116 abort ();
4117
4118 /* Return the entire union. */
4119 return target;
4120 }
4121	 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4122 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
4123 return op0;
4124 if (modifier == EXPAND_INITIALIZER)
4125 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4126 if (flag_force_mem && GET_CODE (op0) == MEM)
4127 op0 = copy_to_reg (op0);
4128
4129 if (target == 0)
4130 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4131 else
4132 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4133 return target;
4134
4135 case PLUS_EXPR:
4136 /* We come here from MINUS_EXPR when the second operand is a constant. */
4137 plus_expr:
4138 this_optab = add_optab;
4139
4140 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4141 something else, make sure we add the register to the constant and
4142 then to the other thing. This case can occur during strength
4143 reduction and doing it this way will produce better code if the
4144 frame pointer or argument pointer is eliminated.
4145
4146 fold-const.c will ensure that the constant is always in the inner
4147 PLUS_EXPR, so the only case we need to do anything about is if
4148 sp, ap, or fp is our second argument, in which case we must swap
4149 the innermost first argument and our second argument. */
4150
4151 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4152 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4153 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4154 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4155 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4156 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4157 {
4158 tree t = TREE_OPERAND (exp, 1);
4159
4160 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4161 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4162 }
4163
4164 /* If the result is to be Pmode and we are adding an integer to
4165 something, we might be forming a constant. So try to use
4166 plus_constant. If it produces a sum and we can't accept it,
4167 use force_operand. This allows P = &ARR[const] to generate
4168 efficient code on machines where a SYMBOL_REF is not a valid
4169 address.
4170
4171 If this is an EXPAND_SUM call, always return the sum. */
4172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4173	 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4174 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4175 || mode == Pmode))
4176 {
4177 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4178 EXPAND_SUM);
4179 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4180 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4181 op1 = force_operand (op1, target);
4182 return op1;
4183 }
4184
4185 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4186 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4187 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4188 || mode == Pmode))
4189 {
4190 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4191 EXPAND_SUM);
4192 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4193 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4194 op0 = force_operand (op0, target);
4195 return op0;
4196 }
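/* Illustrative sketch, not part of this file: what the plus_constant
   path above buys. Assuming 4-byte ints, the address below folds to
   the single operand (plus (symbol_ref "arr") (const_int 12)) instead
   of a runtime add, which matters on machines where a bare SYMBOL_REF
   is not a valid memory address.  */
static int arr[10];
static int *p_folded = &arr[3];   /* arr + 12, formed at compile time */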
4197
4198 /* No sense saving up arithmetic to be done
4199 if it's all in the wrong mode to form part of an address.
4200 And force_operand won't know whether to sign-extend or
4201 zero-extend. */
4202 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4203 || mode != Pmode) goto binop;
4204
4205 preexpand_calls (exp);
4206 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4207 subtarget = 0;
4208
4209 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4210	 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4211
4212 /* Make sure any term that's a sum with a constant comes last. */
4213 if (GET_CODE (op0) == PLUS
4214 && CONSTANT_P (XEXP (op0, 1)))
4215 {
4216 temp = op0;
4217 op0 = op1;
4218 op1 = temp;
4219 }
4220 /* If adding to a sum including a constant,
4221 associate it to put the constant outside. */
4222 if (GET_CODE (op1) == PLUS
4223 && CONSTANT_P (XEXP (op1, 1)))
4224 {
4225 rtx constant_term = const0_rtx;
4226
4227 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4228 if (temp != 0)
4229 op0 = temp;
4230 /* Ensure that MULT comes first if there is one. */
4231 else if (GET_CODE (op0) == MULT)
4232 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4233 else
4234 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4235
4236 /* Let's also eliminate constants from op0 if possible. */
4237 op0 = eliminate_constant_term (op0, &constant_term);
4238
4239 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4240 their sum should be a constant. Form it into OP1, since the
4241 result we want will then be OP0 + OP1. */
4242
4243 temp = simplify_binary_operation (PLUS, mode, constant_term,
4244 XEXP (op1, 1));
4245 if (temp != 0)
4246 op1 = temp;
4247 else
4248 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4249 }
4250
4251 /* Put a constant term last and put a multiplication first. */
4252 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4253 temp = op1, op1 = op0, op0 = temp;
4254
4255 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4256 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4257
4258 case MINUS_EXPR:
4259 /* Handle difference of two symbolic constants,
4260 for the sake of an initializer. */
4261 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4262 && really_constant_p (TREE_OPERAND (exp, 0))
4263 && really_constant_p (TREE_OPERAND (exp, 1)))
4264 {
4265 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4266 VOIDmode, modifier);
4267 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4268 VOIDmode, modifier);
4269 return gen_rtx (MINUS, mode, op0, op1);
4270 }
4271 /* Convert A - const to A + (-const). */
4272 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4273 {
4274 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4275 fold (build1 (NEGATE_EXPR, type,
4276 TREE_OPERAND (exp, 1))));
4277 goto plus_expr;
4278 }
4279 this_optab = sub_optab;
4280 goto binop;
4281
4282 case MULT_EXPR:
4283 preexpand_calls (exp);
4284 /* If first operand is constant, swap them.
4285 Thus the following special case checks need only
4286 check the second operand. */
4287 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4288 {
4289 register tree t1 = TREE_OPERAND (exp, 0);
4290 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4291 TREE_OPERAND (exp, 1) = t1;
4292 }
4293
4294 /* Attempt to return something suitable for generating an
4295 indexed address, for machines that support that. */
4296
4297 if (modifier == EXPAND_SUM && mode == Pmode
4298 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4299	 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4300 {
4301 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4302
4303 /* Apply distributive law if OP0 is x+c. */
4304 if (GET_CODE (op0) == PLUS
4305 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4306 return gen_rtx (PLUS, mode,
4307 gen_rtx (MULT, mode, XEXP (op0, 0),
4308 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4309 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4310 * INTVAL (XEXP (op0, 1))));
4311
4312 if (GET_CODE (op0) != REG)
4313	  op0 = force_operand (op0, NULL_RTX);
4314 if (GET_CODE (op0) != REG)
4315 op0 = copy_to_mode_reg (mode, op0);
4316
4317 return gen_rtx (MULT, mode, op0,
4318	   GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4319 }
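/* Illustrative sketch, not part of this file: the distributive rewrite
   applied above. When OP0 has the form x + c and the multiplier d is
   constant, (x + c) * d is emitted as x*d + c*d so the constant part
   can later fold into an address displacement. The identity holds
   exactly under two's complement wraparound.  */
long
dist_law (long x, long c, long d)
{
  return x * d + c * d;         /* == (x + c) * d */
}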
4320
4321 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4322 subtarget = 0;
4323
4324 /* Check for multiplying things that have been extended
4325 from a narrower type. If this machine supports multiplying
4326 in that narrower type with a result in the desired type,
4327 do it that way, and avoid the explicit type-conversion. */
4328 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4329 && TREE_CODE (type) == INTEGER_TYPE
4330 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4331 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4332 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4333 && int_fits_type_p (TREE_OPERAND (exp, 1),
4334 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4335 /* Don't use a widening multiply if a shift will do. */
4336 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4337	  > HOST_BITS_PER_WIDE_INT)
4338 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4339 ||
4340 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4341 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4342 ==
4343 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4344 /* If both operands are extended, they must either both
4345 be zero-extended or both be sign-extended. */
4346 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4347 ==
4348 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4349 {
4350 enum machine_mode innermode
4351 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4352 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4353 ? umul_widen_optab : smul_widen_optab);
4354 if (mode == GET_MODE_WIDER_MODE (innermode)
4355 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4356 {
4357 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4358	   NULL_RTX, VOIDmode, 0);
4359	 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4360 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4361 VOIDmode, 0);
4362 else
4363 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4364	   NULL_RTX, VOIDmode, 0);
4365 goto binop2;
4366 }
4367 }
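/* Illustrative sketch, not part of this file: the source pattern the
   widening-multiply check above recognizes. Both operands extend from
   a narrower type, so the product can be formed by one narrow-times-
   narrow-gives-wide insn (smul_widen_optab) where such an insn exists.  */
long long
widen_mul (int a, int b)
{
  return (long long) a * (long long) b;   /* 32x32->64 shape */
}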
4368 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4369	 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4370 return expand_mult (mode, op0, op1, target, unsignedp);
4371
4372 case TRUNC_DIV_EXPR:
4373 case FLOOR_DIV_EXPR:
4374 case CEIL_DIV_EXPR:
4375 case ROUND_DIV_EXPR:
4376 case EXACT_DIV_EXPR:
4377 preexpand_calls (exp);
4378 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4379 subtarget = 0;
4380 /* Possible optimization: compute the dividend with EXPAND_SUM
4381 then if the divisor is constant can optimize the case
4382 where some terms of the dividend have coeffs divisible by it. */
4383 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4384	 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4385 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4386
4387 case RDIV_EXPR:
4388 this_optab = flodiv_optab;
4389 goto binop;
4390
4391 case TRUNC_MOD_EXPR:
4392 case FLOOR_MOD_EXPR:
4393 case CEIL_MOD_EXPR:
4394 case ROUND_MOD_EXPR:
4395 preexpand_calls (exp);
4396 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4397 subtarget = 0;
4398 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4399	 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4400 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4401
4402 case FIX_ROUND_EXPR:
4403 case FIX_FLOOR_EXPR:
4404 case FIX_CEIL_EXPR:
4405 abort (); /* Not used for C. */
4406
4407 case FIX_TRUNC_EXPR:
4408	 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4409 if (target == 0)
4410 target = gen_reg_rtx (mode);
4411 expand_fix (target, op0, unsignedp);
4412 return target;
4413
4414 case FLOAT_EXPR:
4415	 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4416 if (target == 0)
4417 target = gen_reg_rtx (mode);
4418 /* expand_float can't figure out what to do if FROM has VOIDmode.
4419 So give it the correct mode. With -O, cse will optimize this. */
4420 if (GET_MODE (op0) == VOIDmode)
4421 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4422 op0);
4423 expand_float (target, op0,
4424 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4425 return target;
4426
4427 case NEGATE_EXPR:
4428 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4429 temp = expand_unop (mode, neg_optab, op0, target, 0);
4430 if (temp == 0)
4431 abort ();
4432 return temp;
4433
4434 case ABS_EXPR:
4435 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4436
4437 /* Handle complex values specially. */
4438 {
4439 enum machine_mode opmode
4440 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4441
4442 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4443 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4444 return expand_complex_abs (opmode, op0, target, unsignedp);
4445 }
4446
4447 /* Unsigned abs is simply the operand. Testing here means we don't
4448 risk generating incorrect code below. */
4449 if (TREE_UNSIGNED (type))
4450 return op0;
4451
4452 /* First try to do it with a special abs instruction. */
4453 temp = expand_unop (mode, abs_optab, op0, target, 0);
4454 if (temp != 0)
4455 return temp;
4456
4457 /* If this machine has expensive jumps, we can do integer absolute
4458 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4459 where W is the width of MODE. */
4460
4461 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4462 {
4463 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4464 size_int (GET_MODE_BITSIZE (mode) - 1),
4465	   NULL_RTX, 0);
4466
4467 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4468 OPTAB_LIB_WIDEN);
4469 if (temp != 0)
4470 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4471 OPTAB_LIB_WIDEN);
4472
4473 if (temp != 0)
4474 return temp;
4475 }
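/* Illustrative sketch, not part of this file: the branch-free absolute
   value emitted above, written for a 32-bit int (W == 32). The
   arithmetic shift yields 0 for nonnegative X and -1 otherwise.  */
int
iabs32 (int x)
{
  int m = x >> 31;              /* 0 or -1 */
  return (x ^ m) - m;           /* ((x >> W-1) ^ x) - (x >> W-1) */
}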
4476
4477 /* If that does not win, use conditional jump and negate. */
4478 target = original_target;
4479 temp = gen_label_rtx ();
4480 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4481 || (GET_CODE (target) == REG
4482 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4483 target = gen_reg_rtx (mode);
4484 emit_move_insn (target, op0);
4485 emit_cmp_insn (target,
4486 expand_expr (convert (type, integer_zero_node),
4487 NULL_RTX, VOIDmode, 0),
4488 GE, NULL_RTX, mode, 0, 0);
4489 NO_DEFER_POP;
4490 emit_jump_insn (gen_bge (temp));
4491 op0 = expand_unop (mode, neg_optab, target, target, 0);
4492 if (op0 != target)
4493 emit_move_insn (target, op0);
4494 emit_label (temp);
4495 OK_DEFER_POP;
4496 return target;
4497
4498 case MAX_EXPR:
4499 case MIN_EXPR:
4500 target = original_target;
4501 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4502 || (GET_CODE (target) == REG
4503 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4504 target = gen_reg_rtx (mode);
4505	 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4506 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4507
4508 /* First try to do it with a special MIN or MAX instruction.
4509 If that does not win, use a conditional jump to select the proper
4510 value. */
4511 this_optab = (TREE_UNSIGNED (type)
4512 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4513 : (code == MIN_EXPR ? smin_optab : smax_optab));
4514
4515 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4516 OPTAB_WIDEN);
4517 if (temp != 0)
4518 return temp;
4519
4520 if (target != op0)
4521 emit_move_insn (target, op0);
4522 op0 = gen_label_rtx ();
4523 if (code == MAX_EXPR)
4524 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4525 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4526 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4527 else
4528 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4529 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4530 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4531 if (temp == const0_rtx)
4532 emit_move_insn (target, op1);
4533 else if (temp != const_true_rtx)
4534 {
4535 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4536 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4537 else
4538 abort ();
4539 emit_move_insn (target, op1);
4540 }
4541 emit_label (op0);
4542 return target;
4543
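/* Illustrative sketch, not part of this file: the conditional-jump
   fallback used above when no min/max insn exists, shown for signed
   MAX. The copy happens first; the branch skips the second store.  */
int
smax_fallback (int a, int b)
{
  int t = a;                    /* emit_move_insn (target, op0) */
  if (! (t >= b))               /* compare GE; branch over the store */
    t = b;
  return t;
}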
4544/* ??? Can optimize when the operand of this is a bitwise operation,
4545 by using a different bitwise operation. */
4546 case BIT_NOT_EXPR:
4547 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4548 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4549 if (temp == 0)
4550 abort ();
4551 return temp;
4552
4553 case FFS_EXPR:
4554 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4555 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4556 if (temp == 0)
4557 abort ();
4558 return temp;
4559
4560/* ??? Can optimize bitwise operations with one arg constant.
4561 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4562 and (a bitwise1 b) bitwise2 b (etc)
4563	 but that is probably not worthwhile.  */
4564
4565/* BIT_AND_EXPR is for bitwise anding.
4566 TRUTH_AND_EXPR is for anding two boolean values
4567 when we want in all cases to compute both of them.
4568 In general it is fastest to do TRUTH_AND_EXPR by
4569 computing both operands as actual zero-or-1 values
4570 and then bitwise anding. In cases where there cannot
4571 be any side effects, better code would be made by
4572 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4573 but the question is how to recognize those cases. */
4574
4575 case TRUTH_AND_EXPR:
4576 case BIT_AND_EXPR:
4577 this_optab = and_optab;
4578 goto binop;
4579
4580/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4581 case TRUTH_OR_EXPR:
4582 case BIT_IOR_EXPR:
4583 this_optab = ior_optab;
4584 goto binop;
4585
4586 case BIT_XOR_EXPR:
4587 this_optab = xor_optab;
4588 goto binop;
4589
4590 case LSHIFT_EXPR:
4591 case RSHIFT_EXPR:
4592 case LROTATE_EXPR:
4593 case RROTATE_EXPR:
4594 preexpand_calls (exp);
4595 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4596 subtarget = 0;
4597 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4598 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4599 unsignedp);
4600
4601/* Could determine the answer when only additive constants differ.
4602 Also, the addition of one can be handled by changing the condition. */
4603 case LT_EXPR:
4604 case LE_EXPR:
4605 case GT_EXPR:
4606 case GE_EXPR:
4607 case EQ_EXPR:
4608 case NE_EXPR:
4609 preexpand_calls (exp);
4610 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4611 if (temp != 0)
4612 return temp;
4613 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4614 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4615 && original_target
4616 && GET_CODE (original_target) == REG
4617 && (GET_MODE (original_target)
4618 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4619 {
4620 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4621 if (temp != original_target)
4622 temp = copy_to_reg (temp);
4623 op1 = gen_label_rtx ();
4624	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4625 GET_MODE (temp), unsignedp, 0);
4626 emit_jump_insn (gen_beq (op1));
4627 emit_move_insn (temp, const1_rtx);
4628 emit_label (op1);
4629 return temp;
4630 }
4631 /* If no set-flag instruction, must generate a conditional
4632 store into a temporary variable. Drop through
4633 and handle this like && and ||. */
4634
4635 case TRUTH_ANDIF_EXPR:
4636 case TRUTH_ORIF_EXPR:
4637 if (target == 0 || ! safe_from_p (target, exp)
4638 /* Make sure we don't have a hard reg (such as function's return
4639 value) live across basic blocks, if not optimizing. */
4640 || (!optimize && GET_CODE (target) == REG
4641 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4642 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4643 emit_clr_insn (target);
4644 op1 = gen_label_rtx ();
4645 jumpifnot (exp, op1);
4646 emit_0_to_1_insn (target);
4647 emit_label (op1);
4648 return target;
4649
4650 case TRUTH_NOT_EXPR:
4651 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4652 /* The parser is careful to generate TRUTH_NOT_EXPR
4653 only with operands that are always zero or one. */
4654	 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4655 target, 1, OPTAB_LIB_WIDEN);
4656 if (temp == 0)
4657 abort ();
4658 return temp;
4659
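/* Illustrative sketch, not part of this file: since the parser
   guarantees the operand of TRUTH_NOT_EXPR is exactly 0 or 1, the
   negation above is a single XOR with 1, with no comparison.  */
int
truth_not (int b)               /* B must be 0 or 1 */
{
  return b ^ 1;
}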
4660 case COMPOUND_EXPR:
4661 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4662 emit_queue ();
4663 return expand_expr (TREE_OPERAND (exp, 1),
4664 (ignore ? const0_rtx : target),
4665 VOIDmode, 0);
4666
4667 case COND_EXPR:
4668 {
4669 /* Note that COND_EXPRs whose type is a structure or union
4670 are required to be constructed to contain assignments of
4671 a temporary variable, so that we can evaluate them here
4672 for side effect only. If type is void, we must do likewise. */
4673
4674 /* If an arm of the branch requires a cleanup,
4675 only that cleanup is performed. */
4676
4677 tree singleton = 0;
4678 tree binary_op = 0, unary_op = 0;
4679 tree old_cleanups = cleanups_this_call;
4680 cleanups_this_call = 0;
4681
4682 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4683 convert it to our mode, if necessary. */
4684 if (integer_onep (TREE_OPERAND (exp, 1))
4685 && integer_zerop (TREE_OPERAND (exp, 2))
4686 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4687 {
4688 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4689 if (GET_MODE (op0) == mode)
4690 return op0;
4691 if (target == 0)
4692 target = gen_reg_rtx (mode);
4693 convert_move (target, op0, unsignedp);
4694 return target;
4695 }
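/* Illustrative sketch, not part of this file: the special case above.
   (a < b ? 1 : 0) is just the comparison itself as a 0/1 value, so it
   is expanded directly instead of with a branch around two stores.  */
int
flag_value (int a, int b)
{
  return a < b;
}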
4696
4697 /* If we are not to produce a result, we have no target. Otherwise,
4698 if a target was specified use it; it will not be used as an
4699 intermediate target unless it is safe. If no target, use a
4700 temporary. */
4701
4702 if (mode == VOIDmode || ignore)
4703 temp = 0;
4704 else if (original_target
4705 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4706 temp = original_target;
4707 else if (mode == BLKmode)
4708 {
4709 if (TYPE_SIZE (type) == 0
4710 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4711 abort ();
4712 temp = assign_stack_temp (BLKmode,
4713 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4714 + BITS_PER_UNIT - 1)
4715 / BITS_PER_UNIT, 0);
4716 }
4717 else
4718 temp = gen_reg_rtx (mode);
4719
4720 /* Check for X ? A + B : A. If we have this, we can copy
4721 A to the output and conditionally add B. Similarly for unary
4722 operations. Don't do this if X has side-effects because
4723 those side effects might affect A or B and the "?" operation is
4724 a sequence point in ANSI. (We test for side effects later.) */
4725
4726 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4727 && operand_equal_p (TREE_OPERAND (exp, 2),
4728 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4729 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4730 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4731 && operand_equal_p (TREE_OPERAND (exp, 1),
4732 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4733 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4734 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4735 && operand_equal_p (TREE_OPERAND (exp, 2),
4736 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4737 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4738 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4739 && operand_equal_p (TREE_OPERAND (exp, 1),
4740 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4741 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4742
4743 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4744 operation, do this as A + (X != 0). Similarly for other simple
4745 binary operators. */
4746 if (singleton && binary_op
4747 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4748 && (TREE_CODE (binary_op) == PLUS_EXPR
4749 || TREE_CODE (binary_op) == MINUS_EXPR
4750 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4751 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4752 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4753 && integer_onep (TREE_OPERAND (binary_op, 1))
4754 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4755 {
4756 rtx result;
4757 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4758 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4759 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4760 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4761 : and_optab);
4762
4763 /* If we had X ? A : A + 1, do this as A + (X == 0).
4764
4765 We have to invert the truth value here and then put it
4766 back later if do_store_flag fails. We cannot simply copy
4767 TREE_OPERAND (exp, 0) to another variable and modify that
4768 because invert_truthvalue can modify the tree pointed to
4769 by its argument. */
4770 if (singleton == TREE_OPERAND (exp, 1))
4771 TREE_OPERAND (exp, 0)
4772 = invert_truthvalue (TREE_OPERAND (exp, 0));
4773
4774 result = do_store_flag (TREE_OPERAND (exp, 0),
4775 (safe_from_p (temp, singleton)
4776 ? temp : NULL_RTX),
4777 mode, BRANCH_COST <= 1);
4778
4779 if (result)
4780 {
4781	    op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4782 return expand_binop (mode, boptab, op1, result, temp,
4783 unsignedp, OPTAB_LIB_WIDEN);
4784 }
4785 else if (singleton == TREE_OPERAND (exp, 1))
4786 TREE_OPERAND (exp, 0)
4787 = invert_truthvalue (TREE_OPERAND (exp, 0));
4788 }
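/* Illustrative sketch, not part of this file: the store-flag rewrite
   tried above. When the condition can be computed as a 0/1 value, a
   conditional increment becomes straight-line code.  */
int
cond_increment (int x, int a)
{
  return a + (x != 0);          /* == (x ? a + 1 : a), no branch */
}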
4789
4790 NO_DEFER_POP;
4791 op0 = gen_label_rtx ();
4792
4793 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4794 {
4795 if (temp != 0)
4796 {
4797 /* If the target conflicts with the other operand of the
4798 binary op, we can't use it. Also, we can't use the target
4799 if it is a hard register, because evaluating the condition
4800 might clobber it. */
4801 if ((binary_op
4802 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4803 || (GET_CODE (temp) == REG
4804 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4805 temp = gen_reg_rtx (mode);
4806 store_expr (singleton, temp, 0);
4807 }
4808 else
4809 expand_expr (singleton,
4810 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4811 if (cleanups_this_call)
4812 {
4813 sorry ("aggregate value in COND_EXPR");
4814 cleanups_this_call = 0;
4815 }
4816 if (singleton == TREE_OPERAND (exp, 1))
4817 jumpif (TREE_OPERAND (exp, 0), op0);
4818 else
4819 jumpifnot (TREE_OPERAND (exp, 0), op0);
4820
4821 if (binary_op && temp == 0)
4822 /* Just touch the other operand. */
4823 expand_expr (TREE_OPERAND (binary_op, 1),
4824	   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4825 else if (binary_op)
4826 store_expr (build (TREE_CODE (binary_op), type,
4827 make_tree (type, temp),
4828 TREE_OPERAND (binary_op, 1)),
4829 temp, 0);
4830 else
4831 store_expr (build1 (TREE_CODE (unary_op), type,
4832 make_tree (type, temp)),
4833 temp, 0);
4834 op1 = op0;
4835 }
4836#if 0
4837 /* This is now done in jump.c and is better done there because it
4838 produces shorter register lifetimes. */
4839
4840 /* Check for both possibilities either constants or variables
4841 in registers (but not the same as the target!). If so, can
4842 save branches by assigning one, branching, and assigning the
4843 other. */
4844 else if (temp && GET_MODE (temp) != BLKmode
4845 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4846 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4847 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4848 && DECL_RTL (TREE_OPERAND (exp, 1))
4849 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4850 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4851 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4852 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4853 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4854 && DECL_RTL (TREE_OPERAND (exp, 2))
4855 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4856 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4857 {
4858 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4859 temp = gen_reg_rtx (mode);
4860 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4861 jumpifnot (TREE_OPERAND (exp, 0), op0);
4862 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4863 op1 = op0;
4864 }
4865#endif
4866 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4867 comparison operator. If we have one of these cases, set the
4868 output to A, branch on A (cse will merge these two references),
4869 then set the output to FOO. */
4870 else if (temp
4871 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4872 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4873 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4874 TREE_OPERAND (exp, 1), 0)
4875 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4876 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4877 {
4878 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4879 temp = gen_reg_rtx (mode);
4880 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4881 jumpif (TREE_OPERAND (exp, 0), op0);
4882 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4883 op1 = op0;
4884 }
4885 else if (temp
4886 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4887 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4888 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4889 TREE_OPERAND (exp, 2), 0)
4890 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4891 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4892 {
4893 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4894 temp = gen_reg_rtx (mode);
4895 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4896 jumpifnot (TREE_OPERAND (exp, 0), op0);
4897 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4898 op1 = op0;
4899 }
4900 else
4901 {
4902 op1 = gen_label_rtx ();
4903 jumpifnot (TREE_OPERAND (exp, 0), op0);
4904 if (temp != 0)
4905 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4906 else
4907 expand_expr (TREE_OPERAND (exp, 1),
4908 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4909 if (cleanups_this_call)
4910 {
4911 sorry ("aggregate value in COND_EXPR");
4912 cleanups_this_call = 0;
4913 }
4914
4915 emit_queue ();
4916 emit_jump_insn (gen_jump (op1));
4917 emit_barrier ();
4918 emit_label (op0);
4919 if (temp != 0)
4920 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4921 else
4922 expand_expr (TREE_OPERAND (exp, 2),
4923 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4924 }
4925
4926 if (cleanups_this_call)
4927 {
4928 sorry ("aggregate value in COND_EXPR");
4929 cleanups_this_call = 0;
4930 }
4931
4932 emit_queue ();
4933 emit_label (op1);
4934 OK_DEFER_POP;
4935 cleanups_this_call = old_cleanups;
4936 return temp;
4937 }
4938
4939 case TARGET_EXPR:
4940 {
4941 /* Something needs to be initialized, but we didn't know
4942 where that thing was when building the tree. For example,
4943 it could be the return value of a function, or a parameter
4944	 to a function which is laid out in the stack, or a temporary
4945 variable which must be passed by reference.
4946
4947 We guarantee that the expression will either be constructed
4948 or copied into our original target. */
4949
4950 tree slot = TREE_OPERAND (exp, 0);
4951	 tree exp1;
4952
4953 if (TREE_CODE (slot) != VAR_DECL)
4954 abort ();
4955
4956 if (target == 0)
4957 {
4958 if (DECL_RTL (slot) != 0)
4959 {
4960 target = DECL_RTL (slot);
4961	     /* If the slot has already been expanded, don't do
4962	 it again.  (mrs)  */
4963 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4964 return target;
4965	   }
4966 else
4967 {
4968 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4969 /* All temp slots at this level must not conflict. */
4970 preserve_temp_slots (target);
4971 DECL_RTL (slot) = target;
4972 }
4973
4974#if 0
4975	 /* I bet this needs to be done, and I bet that it needs to
4976	 be above, inside the else clause.  The reason is
4977	 simple: how else is it going to get cleaned up?  (mrs)
4978
4979	 The reason it probably did not work before, and was
4980	 commented out, is that this was re-expanding already
4981	 expanded target_exprs (target == 0 and DECL_RTL (slot)
4982	 != 0), also cleaning them up many times as well.  :-(  */
4983
4984 /* Since SLOT is not known to the called function
4985 to belong to its stack frame, we must build an explicit
4986 cleanup. This case occurs when we must build up a reference
4987 to pass the reference as an argument. In this case,
4988 it is very likely that such a reference need not be
4989 built here. */
4990
4991 if (TREE_OPERAND (exp, 2) == 0)
4992 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4993 if (TREE_OPERAND (exp, 2))
4994 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4995 cleanups_this_call);
4996#endif
4997 }
4998 else
4999 {
5000	 /* This case does occur when expanding a parameter which
5001 needs to be constructed on the stack. The target
5002 is the actual stack address that we want to initialize.
5003 The function we call will perform the cleanup in this case. */
5004
5005 DECL_RTL (slot) = target;
5006 }
5007
5008 exp1 = TREE_OPERAND (exp, 1);
5009 /* Mark it as expanded. */
5010 TREE_OPERAND (exp, 1) = NULL_TREE;
5011
5012 return expand_expr (exp1, target, tmode, modifier);
5013 }
5014
5015 case INIT_EXPR:
5016 {
5017 tree lhs = TREE_OPERAND (exp, 0);
5018 tree rhs = TREE_OPERAND (exp, 1);
5019 tree noncopied_parts = 0;
5020 tree lhs_type = TREE_TYPE (lhs);
5021
5022 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5023 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5024 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5025 TYPE_NONCOPIED_PARTS (lhs_type));
5026 while (noncopied_parts != 0)
5027 {
5028 expand_assignment (TREE_VALUE (noncopied_parts),
5029 TREE_PURPOSE (noncopied_parts), 0, 0);
5030 noncopied_parts = TREE_CHAIN (noncopied_parts);
5031 }
5032 return temp;
5033 }
5034
5035 case MODIFY_EXPR:
5036 {
5037 /* If lhs is complex, expand calls in rhs before computing it.
5038 That's so we don't compute a pointer and save it over a call.
5039 If lhs is simple, compute it first so we can give it as a
5040 target if the rhs is just a call. This avoids an extra temp and copy
5041 and thus prevents a partial-subsumption which makes bad code.
5042 Actually we could treat component_ref's of vars like vars. */
5043
5044 tree lhs = TREE_OPERAND (exp, 0);
5045 tree rhs = TREE_OPERAND (exp, 1);
5046 tree noncopied_parts = 0;
5047 tree lhs_type = TREE_TYPE (lhs);
5048
5049 temp = 0;
5050
5051 if (TREE_CODE (lhs) != VAR_DECL
5052 && TREE_CODE (lhs) != RESULT_DECL
5053 && TREE_CODE (lhs) != PARM_DECL)
5054 preexpand_calls (exp);
5055
5056 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5057 of size 1. In this case, (unless we need the result of the
5058 assignment) we can do this more efficiently with a
5059 test followed by an assignment, if necessary.
5060
5061 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5062 things change so we do, this code should be enhanced to
5063 support it. */
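/* Illustrative example (not in the original source): for `s.a |= s.b'
   with one-bit fields and the result ignored, the code below emits
   roughly `if (s.b) s.a = 1;', a jump around a constant store,
   instead of a load-modify-store sequence.  */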
5064 if (ignore
5065 && TREE_CODE (lhs) == COMPONENT_REF
5066 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5067 || TREE_CODE (rhs) == BIT_AND_EXPR)
5068 && TREE_OPERAND (rhs, 0) == lhs
5069 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5070 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5071 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5072 {
5073 rtx label = gen_label_rtx ();
5074
5075 do_jump (TREE_OPERAND (rhs, 1),
5076 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5077 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5078 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5079 (TREE_CODE (rhs) == BIT_IOR_EXPR
5080 ? integer_one_node
5081 : integer_zero_node)),
5082 0, 0);
5083 do_pending_stack_adjust ();
5084 emit_label (label);
5085 return const0_rtx;
5086 }
5087
5088 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5089 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5090 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5091 TYPE_NONCOPIED_PARTS (lhs_type));
5092
5093 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5094 while (noncopied_parts != 0)
5095 {
5096 expand_assignment (TREE_PURPOSE (noncopied_parts),
5097 TREE_VALUE (noncopied_parts), 0, 0);
5098 noncopied_parts = TREE_CHAIN (noncopied_parts);
5099 }
5100 return temp;
5101 }
5102
5103 case PREINCREMENT_EXPR:
5104 case PREDECREMENT_EXPR:
5105 return expand_increment (exp, 0);
5106
5107 case POSTINCREMENT_EXPR:
5108 case POSTDECREMENT_EXPR:
5109 /* Faster to treat as pre-increment if result is not used. */
5110 return expand_increment (exp, ! ignore);
5111
5112 case ADDR_EXPR:
5113 /* Are we taking the address of a nested function? */
5114 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5115 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5116 {
5117 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5118 op0 = force_operand (op0, target);
5119 }
5120 else
5121 {
5122 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5123 (modifier == EXPAND_INITIALIZER
5124 ? modifier : EXPAND_CONST_ADDRESS));
5125 if (GET_CODE (op0) != MEM)
5126 abort ();
5127
5128 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5129 return XEXP (op0, 0);
5130 op0 = force_operand (XEXP (op0, 0), target);
5131 }
5132 if (flag_force_addr && GET_CODE (op0) != REG)
5133 return force_reg (Pmode, op0);
5134 return op0;
5135
5136 case ENTRY_VALUE_EXPR:
5137 abort ();
5138
5139 /* COMPLEX type for Extended Pascal & Fortran */
5140 case COMPLEX_EXPR:
5141 {
5142 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5143
5144 rtx prev;
5145
5146 /* Get the rtx code of the operands. */
5147 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5148 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5149
5150 if (! target)
5151 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5152
5153 prev = get_last_insn ();
5154
5155 /* Tell flow that the whole of the destination is being set. */
5156 if (GET_CODE (target) == REG)
5157 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5158
5159 /* Move the real (op0) and imaginary (op1) parts to their location. */
5160 emit_move_insn (gen_realpart (mode, target), op0);
5161 emit_move_insn (gen_imagpart (mode, target), op1);
5162
5163 /* Complex construction should appear as a single unit. */
5164 group_insns (prev);
5165
5166 return target;
5167 }
5168
5169 case REALPART_EXPR:
5170 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5171 return gen_realpart (mode, op0);
5172
5173 case IMAGPART_EXPR:
5174 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5175 return gen_imagpart (mode, op0);
5176
5177 case CONJ_EXPR:
5178 {
5179 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5180 rtx imag_t;
5181 rtx prev;
5182
5183 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5184
5185 if (! target)
5186 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5187
5188 prev = get_last_insn ();
5189
5190 /* Tell flow that the whole of the destination is being set. */
5191 if (GET_CODE (target) == REG)
5192 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5193
5194 /* Store the realpart and the negated imagpart to target. */
5195 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5196
5197 imag_t = gen_imagpart (mode, target);
5198 temp = expand_unop (mode, neg_optab,
5199 gen_imagpart (mode, op0), imag_t, 0);
5200 if (temp != imag_t)
5201 emit_move_insn (imag_t, temp);
5202
5203 /* Conjugate should appear as a single unit. */
5204 group_insns (prev);
5205
5206 return target;
5207 }
5208
5209 case ERROR_MARK:
5210 return const0_rtx;
5211
5212 default:
5213 return (*lang_expand_expr) (exp, target, tmode, modifier);
5214 }
5215
5216 /* Here to do an ordinary binary operator, generating an instruction
5217 from the optab already placed in `this_optab'. */
5218 binop:
5219 preexpand_calls (exp);
5220 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5221 subtarget = 0;
5222 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5223 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5224 binop2:
5225 temp = expand_binop (mode, this_optab, op0, op1, target,
5226 unsignedp, OPTAB_LIB_WIDEN);
5227 if (temp == 0)
5228 abort ();
5229 return temp;
5230}
5231\f
5232/* Return the alignment in bits of EXP, a pointer valued expression.
5233 But don't return more than MAX_ALIGN no matter what.
5234 The alignment returned is, by default, the alignment of the thing that
5235 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5236
5237 Otherwise, look at the expression to see if we can do better, i.e., if the
5238 expression is actually pointing at an object whose alignment is tighter. */
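/* Illustrative example (not in the original source): for `(char *) &i'
   where `i' is an int with 32-bit alignment, the loop below looks
   through the conversion and reports 32 bits, not the 8 bits implied
   by the char pointer type.  */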
5239
5240static int
5241get_pointer_alignment (exp, max_align)
5242 tree exp;
5243 unsigned max_align;
5244{
5245 unsigned align, inner;
5246
5247 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5248 return 0;
5249
5250 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5251 align = MIN (align, max_align);
5252
5253 while (1)
5254 {
5255 switch (TREE_CODE (exp))
5256 {
5257 case NOP_EXPR:
5258 case CONVERT_EXPR:
5259 case NON_LVALUE_EXPR:
5260 exp = TREE_OPERAND (exp, 0);
5261 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5262 return align;
5263 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5264 inner = MIN (inner, max_align);
5265 align = MAX (align, inner);
5266 break;
5267
5268 case PLUS_EXPR:
5269 /* If sum of pointer + int, restrict our maximum alignment to that
5270 imposed by the integer. If not, we can't do any better than
5271 ALIGN. */
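/* Illustrative example (not in the original source): for `p + 2',
   whatever the alignment of `p', the sum is only known to be 2-byte
   (16-bit) aligned, so MAX_ALIGN is halved until the constant offset
   is a multiple of it.  */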
5272 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5273 return align;
5274
5275 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5276 & (max_align - 1))
5277 != 0)
5278 max_align >>= 1;
5279
5280 exp = TREE_OPERAND (exp, 0);
5281 break;
5282
5283 case ADDR_EXPR:
5284 /* See what we are pointing at and look at its alignment. */
5285 exp = TREE_OPERAND (exp, 0);
5286 if (TREE_CODE (exp) == FUNCTION_DECL)
5287 align = MAX (align, FUNCTION_BOUNDARY);
5288 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5289 align = MAX (align, DECL_ALIGN (exp));
5290#ifdef CONSTANT_ALIGNMENT
5291 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5292 align = CONSTANT_ALIGNMENT (exp, align);
5293#endif
5294 return MIN (align, max_align);
5295
5296 default:
5297 return align;
5298 }
5299 }
5300}
5301\f
5302/* Return the tree node and offset if a given argument corresponds to
5303 a string constant. */
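/* Illustrative example (not in the original source): for the argument
   `"foobar" + 3', i.e. (PLUS_EXPR (ADDR_EXPR <STRING_CST>) 3), we
   return the STRING_CST and set *PTR_OFFSET to the tree for 3.  */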
5304
5305static tree
5306string_constant (arg, ptr_offset)
5307 tree arg;
5308 tree *ptr_offset;
5309{
5310 STRIP_NOPS (arg);
5311
5312 if (TREE_CODE (arg) == ADDR_EXPR
5313 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5314 {
5315 *ptr_offset = integer_zero_node;
5316 return TREE_OPERAND (arg, 0);
5317 }
5318 else if (TREE_CODE (arg) == PLUS_EXPR)
5319 {
5320 tree arg0 = TREE_OPERAND (arg, 0);
5321 tree arg1 = TREE_OPERAND (arg, 1);
5322
5323 STRIP_NOPS (arg0);
5324 STRIP_NOPS (arg1);
5325
5326 if (TREE_CODE (arg0) == ADDR_EXPR
5327 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5328 {
5329 *ptr_offset = arg1;
5330 return TREE_OPERAND (arg0, 0);
5331 }
5332 else if (TREE_CODE (arg1) == ADDR_EXPR
5333 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5334 {
5335 *ptr_offset = arg0;
5336 return TREE_OPERAND (arg1, 0);
5337 }
5338 }
5339
5340 return 0;
5341}
5342
5343 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5344 value, because the array could contain a zero byte in the middle;
5345 TREE_STRING_LENGTH is the size of the character array, not the string.
5346
5347 Unfortunately, string_constant can't access the values of const char
5348 arrays with initializers, so neither can we here. */
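/* Illustrative example (not in the original source): for
   c_strlen ("hello" + 2) the offset is the constant 2, so the code
   below returns size_int (strlen (ptr + 2)), i.e. 3.  */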
5349
5350static tree
5351c_strlen (src)
5352 tree src;
5353{
5354 tree offset_node;
5355 int offset, max;
5356 char *ptr;
5357
5358 src = string_constant (src, &offset_node);
5359 if (src == 0)
5360 return 0;
5361 max = TREE_STRING_LENGTH (src);
5362 ptr = TREE_STRING_POINTER (src);
5363 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5364 {
5365 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5366 compute the offset to the following null if we don't know where to
5367 start searching for it. */
5368 int i;
5369 for (i = 0; i < max; i++)
5370 if (ptr[i] == 0)
5371 return 0;
5372 /* We don't know the starting offset, but we do know that the string
5373 has no internal zero bytes. We can assume that the offset falls
5374 within the bounds of the string; otherwise, the programmer deserves
5375 what he gets. Subtract the offset from the length of the string,
5376 and return that. */
5377 /* This would perhaps not be valid if we were dealing with named
5378 arrays in addition to literal string constants. */
5379 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5380 }
5381
5382 /* We have a known offset into the string. Start searching there for
5383 a null character. */
5384 if (offset_node == 0)
5385 offset = 0;
5386 else
5387 {
5388 /* Did we get a long long offset? If so, punt. */
5389 if (TREE_INT_CST_HIGH (offset_node) != 0)
5390 return 0;
5391 offset = TREE_INT_CST_LOW (offset_node);
5392 }
5393 /* If the offset is known to be out of bounds, warn, and call strlen at
5394 runtime. */
5395 if (offset < 0 || offset > max)
5396 {
5397 warning ("offset outside bounds of constant string");
5398 return 0;
5399 }
5400 /* Use strlen to search for the first zero byte. Since any strings
5401 constructed with build_string will have nulls appended, we win even
5402 if we get handed something like (char[4])"abcd".
5403
5404 Since OFFSET is our starting index into the string, no further
5405 calculation is needed. */
5406 return size_int (strlen (ptr + offset));
5407}
5408\f
5409/* Expand an expression EXP that calls a built-in function,
5410 with result going to TARGET if that's convenient
5411 (and in mode MODE if that's convenient).
5412 SUBTARGET may be used as the target for computing one of EXP's operands.
5413 IGNORE is nonzero if the value is to be ignored. */
5414
5415static rtx
5416expand_builtin (exp, target, subtarget, mode, ignore)
5417 tree exp;
5418 rtx target;
5419 rtx subtarget;
5420 enum machine_mode mode;
5421 int ignore;
5422{
5423 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5424 tree arglist = TREE_OPERAND (exp, 1);
5425 rtx op0;
5426 rtx lab1, insns;
5427 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5428 optab builtin_optab;
5429
5430 switch (DECL_FUNCTION_CODE (fndecl))
5431 {
5432 case BUILT_IN_ABS:
5433 case BUILT_IN_LABS:
5434 case BUILT_IN_FABS:
5435 /* build_function_call changes these into ABS_EXPR. */
5436 abort ();
5437
5438 case BUILT_IN_SIN:
5439 case BUILT_IN_COS:
5440 case BUILT_IN_FSQRT:
5441 /* If not optimizing, call the library function. */
5442 if (! optimize)
5443 break;
5444
5445 if (arglist == 0
5446 /* Arg could be wrong type if user redeclared this fcn wrong. */
5447 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5448 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5449
5450 /* Stabilize and compute the argument. */
5451 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5452 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5453 {
5454 exp = copy_node (exp);
5455 arglist = copy_node (arglist);
5456 TREE_OPERAND (exp, 1) = arglist;
5457 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5458 }
5459 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5460
5461 /* Make a suitable register to place result in. */
5462 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5463
5464 emit_queue ();
5465 start_sequence ();
5466
5467 switch (DECL_FUNCTION_CODE (fndecl))
5468 {
5469 case BUILT_IN_SIN:
5470 builtin_optab = sin_optab; break;
5471 case BUILT_IN_COS:
5472 builtin_optab = cos_optab; break;
5473 case BUILT_IN_FSQRT:
5474 builtin_optab = sqrt_optab; break;
5475 default:
5476 abort ();
5477 }
5478
5479 /* Compute into TARGET.
5480 Set TARGET to wherever the result comes back. */
5481 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5482 builtin_optab, op0, target, 0);
5483
5484 /* If we were unable to expand via the builtin, stop the
5485 sequence (without outputting the insns) and break, causing
5486 a call to the library function. */
5487 if (target == 0)
5488 {
5489 end_sequence ();
5490 break;
5491 }
5492
5493 /* Check the results by default. But if flag_fast_math is turned on,
5494 then assume sqrt will always be called with valid arguments. */
5495
5496 if (! flag_fast_math)
5497 {
5498 /* Don't define the builtin FP instructions
5499 if your machine is not IEEE. */
5500 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5501 abort ();
5502
5503 lab1 = gen_label_rtx ();
5504
5505 /* Test the result; if it is NaN, set errno=EDOM because
5506 the argument was not in the domain. */
5507 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5508 emit_jump_insn (gen_beq (lab1));
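/* Illustrative note (not in the original source): under IEEE
   arithmetic a NaN is the only value that compares unequal to
   itself, so the branch above is taken exactly when the result
   is not a NaN.  */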
5509
5510#if TARGET_EDOM
5511 {
5512#ifdef GEN_ERRNO_RTX
5513 rtx errno_rtx = GEN_ERRNO_RTX;
5514#else
5515 rtx errno_rtx
5516 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5517#endif
5518
5519 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5520 }
5521#else
5522 /* We can't set errno=EDOM directly; let the library call do it.
5523 Pop the arguments right away in case the call gets deleted. */
5524 NO_DEFER_POP;
5525 expand_call (exp, target, 0);
5526 OK_DEFER_POP;
5527#endif
5528
5529 emit_label (lab1);
5530 }
5531
5532 /* Output the entire sequence. */
5533 insns = get_insns ();
5534 end_sequence ();
5535 emit_insns (insns);
5536
5537 return target;
5538
5539 case BUILT_IN_SAVEREGS:
5540 /* Don't do __builtin_saveregs more than once in a function.
5541 Save the result of the first call and reuse it. */
5542 if (saveregs_value != 0)
5543 return saveregs_value;
5544 {
5545 /* When this function is called, it means that registers must be
5546 saved on entry to this function. So we migrate the
5547 call to the first insn of this function. */
5548 rtx temp;
5549 rtx seq;
5550 rtx valreg, saved_valreg;
5551
5552 /* Now really call the function. `expand_call' does not call
5553 expand_builtin, so there is no danger of infinite recursion here. */
5554 start_sequence ();
5555
5556#ifdef EXPAND_BUILTIN_SAVEREGS
5557 /* Do whatever the machine needs done in this case. */
5558 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5559#else
5560 /* The register where the function returns its value
5561 is likely to have something else in it, such as an argument.
5562 So preserve that register around the call. */
5563 if (value_mode != VOIDmode)
5564 {
5565 valreg = hard_libcall_value (value_mode);
5566 saved_valreg = gen_reg_rtx (value_mode);
5567 emit_move_insn (saved_valreg, valreg);
5568 }
5569
5570 /* Generate the call, putting the value in a pseudo. */
5571 temp = expand_call (exp, target, ignore);
5572
5573 if (value_mode != VOIDmode)
5574 emit_move_insn (valreg, saved_valreg);
5575#endif
5576
5577 seq = get_insns ();
5578 end_sequence ();
5579
5580 saveregs_value = temp;
5581
5582 /* This won't work inside a SEQUENCE--it really has to be
5583 at the start of the function. */
5584 if (in_sequence_p ())
5585 {
5586 /* Better to do this than to crash. */
5587 error ("`va_start' used within `({...})'");
5588 return temp;
5589 }
5590
5591 /* Put the sequence after the NOTE that starts the function. */
5592 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5593 return temp;
5594 }
5595
5596 /* __builtin_args_info (N) returns word N of the arg space info
5597 for the current function. The number and meanings of words
5598 are controlled by the definition of CUMULATIVE_ARGS. */
5599 case BUILT_IN_ARGS_INFO:
5600 {
5601 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5602 int i;
5603 int *word_ptr = (int *) &current_function_args_info;
5604 tree type, elts, result;
5605
5606 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5607 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5608 __FILE__, __LINE__);
5609
5610 if (arglist != 0)
5611 {
5612 tree arg = TREE_VALUE (arglist);
5613 if (TREE_CODE (arg) != INTEGER_CST)
5614 error ("argument of __builtin_args_info must be constant");
5615 else
5616 {
5617 int wordnum = TREE_INT_CST_LOW (arg);
5618
5619 if (wordnum < 0 || wordnum >= nwords)
5620 error ("argument of __builtin_args_info out of range");
5621 else
5622 return GEN_INT (word_ptr[wordnum]);
5623 }
5624 }
5625 else
5626 error ("missing argument in __builtin_args_info");
5627
5628 return const0_rtx;
5629
5630#if 0
5631 for (i = 0; i < nwords; i++)
5632 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5633
5634 type = build_array_type (integer_type_node,
5635 build_index_type (build_int_2 (nwords, 0)));
5636 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5637 TREE_CONSTANT (result) = 1;
5638 TREE_STATIC (result) = 1;
5639 result = build (INDIRECT_REF, build_pointer_type (type), result);
5640 TREE_CONSTANT (result) = 1;
5641 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5642#endif
5643 }
5644
5645 /* Return the address of the first anonymous stack arg. */
5646 case BUILT_IN_NEXT_ARG:
5647 {
5648 tree fntype = TREE_TYPE (current_function_decl);
5649 if (!(TYPE_ARG_TYPES (fntype) != 0
5650 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5651 != void_type_node)))
5652 {
5653 error ("`va_start' used in function with fixed args");
5654 return const0_rtx;
5655 }
5656 }
5657
5658 return expand_binop (Pmode, add_optab,
5659 current_function_internal_arg_pointer,
5660 current_function_arg_offset_rtx,
5661 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5662
5663 case BUILT_IN_CLASSIFY_TYPE:
5664 if (arglist != 0)
5665 {
5666 tree type = TREE_TYPE (TREE_VALUE (arglist));
5667 enum tree_code code = TREE_CODE (type);
5668 if (code == VOID_TYPE)
5669 return GEN_INT (void_type_class);
5670 if (code == INTEGER_TYPE)
5671 return GEN_INT (integer_type_class);
5672 if (code == CHAR_TYPE)
5673 return GEN_INT (char_type_class);
5674 if (code == ENUMERAL_TYPE)
5675 return GEN_INT (enumeral_type_class);
5676 if (code == BOOLEAN_TYPE)
5677 return GEN_INT (boolean_type_class);
5678 if (code == POINTER_TYPE)
5679 return GEN_INT (pointer_type_class);
5680 if (code == REFERENCE_TYPE)
5681 return GEN_INT (reference_type_class);
5682 if (code == OFFSET_TYPE)
5683 return GEN_INT (offset_type_class);
5684 if (code == REAL_TYPE)
5685 return GEN_INT (real_type_class);
5686 if (code == COMPLEX_TYPE)
5687 return GEN_INT (complex_type_class);
5688 if (code == FUNCTION_TYPE)
5689 return GEN_INT (function_type_class);
5690 if (code == METHOD_TYPE)
5691 return GEN_INT (method_type_class);
5692 if (code == RECORD_TYPE)
5693 return GEN_INT (record_type_class);
5694 if (code == UNION_TYPE)
5695 return GEN_INT (union_type_class);
5696 if (code == ARRAY_TYPE)
5697 return GEN_INT (array_type_class);
5698 if (code == STRING_TYPE)
5699 return GEN_INT (string_type_class);
5700 if (code == SET_TYPE)
5701 return GEN_INT (set_type_class);
5702 if (code == FILE_TYPE)
5703 return GEN_INT (file_type_class);
5704 if (code == LANG_TYPE)
5705 return GEN_INT (lang_type_class);
5706 }
5707 return GEN_INT (no_type_class);
5708
5709 case BUILT_IN_CONSTANT_P:
5710 if (arglist == 0)
5711 return const0_rtx;
5712 else
5713 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5714 ? const1_rtx : const0_rtx);
5715
5716 case BUILT_IN_FRAME_ADDRESS:
5717 /* The argument must be a nonnegative integer constant.
5718 It counts the number of frames to scan up the stack.
5719 The value is the address of that frame. */
5720 case BUILT_IN_RETURN_ADDRESS:
5721 /* The argument must be a nonnegative integer constant.
5722 It counts the number of frames to scan up the stack.
5723 The value is the return address saved in that frame. */
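/* Illustrative example (not in the original source):
   __builtin_return_address (1) scans one frame up the dynamic
   chain from the current frame and fetches the return address
   saved there.  */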
5724 if (arglist == 0)
5725 /* Warning about missing arg was already issued. */
5726 return const0_rtx;
5727 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5728 {
5729 error ("invalid arg to __builtin_return_address");
5730 return const0_rtx;
5731 }
5732 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5733 {
5734 error ("invalid arg to __builtin_return_address");
5735 return const0_rtx;
5736 }
5737 else
5738 {
5739 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5740 rtx tem = frame_pointer_rtx;
5741 int i;
5742
5743 /* Scan back COUNT frames to the specified frame. */
5744 for (i = 0; i < count; i++)
5745 {
5746 /* Assume the dynamic chain pointer is in the word that
5747 the frame address points to, unless otherwise specified. */
5748#ifdef DYNAMIC_CHAIN_ADDRESS
5749 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5750#endif
5751 tem = memory_address (Pmode, tem);
5752 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5753 }
5754
5755 /* For __builtin_frame_address, return what we've got. */
5756 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5757 return tem;
5758
5759 /* For __builtin_return_address,
5760 Get the return address from that frame. */
5761#ifdef RETURN_ADDR_RTX
5762 return RETURN_ADDR_RTX (count, tem);
5763#else
5764 tem = memory_address (Pmode,
5765 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5766 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5767#endif
5768 }
5769
5770 case BUILT_IN_ALLOCA:
5771 if (arglist == 0
5772 /* Arg could be non-integer if user redeclared this fcn wrong. */
5773 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5774 return const0_rtx;
5775 current_function_calls_alloca = 1;
5776 /* Compute the argument. */
5777 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5778
5779 /* Allocate the desired space. */
5780 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5781
5782 /* Record the new stack level for nonlocal gotos. */
5783 if (nonlocal_goto_handler_slot != 0)
5784 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5785 return target;
5786
5787 case BUILT_IN_FFS:
5788 /* If not optimizing, call the library function. */
5789 if (!optimize)
5790 break;
5791
5792 if (arglist == 0
5793 /* Arg could be non-integer if user redeclared this fcn wrong. */
5794 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5795 return const0_rtx;
5796
5797 /* Compute the argument. */
5798 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5799 /* Compute ffs, into TARGET if possible.
5800 Set TARGET to wherever the result comes back. */
5801 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5802 ffs_optab, op0, target, 1);
5803 if (target == 0)
5804 abort ();
5805 return target;
5806
5807 case BUILT_IN_STRLEN:
5808 /* If not optimizing, call the library function. */
5809 if (!optimize)
5810 break;
5811
5812 if (arglist == 0
5813 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5814 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5815 return const0_rtx;
5816 else
5817 {
5818 tree src = TREE_VALUE (arglist);
5819 tree len = c_strlen (src);
5820
5821 int align
5822 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5823
5824 rtx result, src_rtx, char_rtx;
5825 enum machine_mode insn_mode = value_mode, char_mode;
5826 enum insn_code icode;
5827
5828 /* If the length is known, just return it. */
5829 if (len != 0)
5830 return expand_expr (len, target, mode, 0);
5831
5832 /* If SRC is not a pointer type, don't do this operation inline. */
5833 if (align == 0)
5834 break;
5835
5836 /* Call a function if we can't compute strlen in the right mode. */
5837
5838 while (insn_mode != VOIDmode)
5839 {
5840 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5841 if (icode != CODE_FOR_nothing)
5842 break;
5843
5844 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5845 }
5846 if (insn_mode == VOIDmode)
5847 break;
5848
5849 /* Make a place to write the result of the instruction. */
5850 result = target;
5851 if (! (result != 0
5852 && GET_CODE (result) == REG
5853 && GET_MODE (result) == insn_mode
5854 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5855 result = gen_reg_rtx (insn_mode);
5856
5857 /* Make sure the operands are acceptable to the predicates. */
5858
5859 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5860 result = gen_reg_rtx (insn_mode);
5861
5862 src_rtx = memory_address (BLKmode,
5863 expand_expr (src, NULL_RTX, Pmode,
5864 EXPAND_NORMAL));
5865 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5866 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5867
5868 char_rtx = const0_rtx;
5869 char_mode = insn_operand_mode[(int)icode][2];
5870 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5871 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5872
5873 emit_insn (GEN_FCN (icode) (result,
5874 gen_rtx (MEM, BLKmode, src_rtx),
5875 char_rtx, GEN_INT (align)));
5876
5877 /* Return the value in the proper mode for this function. */
5878 if (GET_MODE (result) == value_mode)
5879 return result;
5880 else if (target != 0)
5881 {
5882 convert_move (target, result, 0);
5883 return target;
5884 }
5885 else
5886 return convert_to_mode (value_mode, result, 0);
5887 }
5888
5889 case BUILT_IN_STRCPY:
5890 /* If not optimizing, call the library function. */
5891 if (!optimize)
5892 break;
5893
5894 if (arglist == 0
5895 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5896 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5897 || TREE_CHAIN (arglist) == 0
5898 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5899 return const0_rtx;
5900 else
5901 {
5902 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5903
5904 if (len == 0)
5905 break;
5906
5907 len = size_binop (PLUS_EXPR, len, integer_one_node);
5908
5909 chainon (arglist, build_tree_list (NULL_TREE, len));
5910 }
5911
5912 /* Drops through to the memcpy case. */
5913 case BUILT_IN_MEMCPY:
5914 /* If not optimizing, call the library function. */
5915 if (!optimize)
5916 break;
5917
5918 if (arglist == 0
5919 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5920 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5921 || TREE_CHAIN (arglist) == 0
5922 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5923 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5924 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5925 return const0_rtx;
5926 else
5927 {
5928 tree dest = TREE_VALUE (arglist);
5929 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5930 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5931
5932 int src_align
5933 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5934 int dest_align
5935 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5936 rtx dest_rtx;
5937
5938 /* If either SRC or DEST is not a pointer type, don't do
5939 this operation in-line. */
5940 if (src_align == 0 || dest_align == 0)
5941 {
5942 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5943 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5944 break;
5945 }
5946
5947 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5948
5949 /* Copy word part most expediently. */
5950 emit_block_move (gen_rtx (MEM, BLKmode,
5951 memory_address (BLKmode, dest_rtx)),
5952 gen_rtx (MEM, BLKmode,
5953 memory_address (BLKmode,
5954 expand_expr (src, NULL_RTX,
5955 Pmode,
5956 EXPAND_NORMAL))),
5957 expand_expr (len, NULL_RTX, VOIDmode, 0),
5958 MIN (src_align, dest_align));
5959 return dest_rtx;
5960 }
5961
5962/* These comparison functions need an instruction that returns an actual
5963 index. An ordinary compare that just sets the condition codes
5964 is not enough. */
5965#ifdef HAVE_cmpstrsi
5966 case BUILT_IN_STRCMP:
5967 /* If not optimizing, call the library function. */
5968 if (!optimize)
5969 break;
5970
5971 if (arglist == 0
5972 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5973 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5974 || TREE_CHAIN (arglist) == 0
5975 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5976 return const0_rtx;
5977 else if (!HAVE_cmpstrsi)
5978 break;
5979 {
5980 tree arg1 = TREE_VALUE (arglist);
5981 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5982 tree offset;
5983 tree len, len2;
5984
5985 len = c_strlen (arg1);
5986 if (len)
5987 len = size_binop (PLUS_EXPR, integer_one_node, len);
5988 len2 = c_strlen (arg2);
5989 if (len2)
5990 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5991
5992 /* If we don't have a constant length for the first, use the length
5993 of the second, if we know it. We don't require a constant for
5994 this case; some cost analysis could be done if both are available
5995 but neither is constant. For now, assume they're equally cheap.
5996
5997 If both strings have constant lengths, use the smaller. This
5998 could arise if optimization results in strcpy being called with
5999 two fixed strings, or if the code was machine-generated. We should
6000 add some code to the `memcmp' handler below to deal with such
6001 situations, someday. */
6002 if (!len || TREE_CODE (len) != INTEGER_CST)
6003 {
6004 if (len2)
6005 len = len2;
6006 else if (len == 0)
6007 break;
6008 }
6009 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6010 {
6011 if (tree_int_cst_lt (len2, len))
6012 len = len2;
6013 }
6014
6015 chainon (arglist, build_tree_list (NULL_TREE, len));
6016 }
6017
6018 /* Drops through to the memcmp case. */
6019 case BUILT_IN_MEMCMP:
6020 /* If not optimizing, call the library function. */
6021 if (!optimize)
6022 break;
6023
6024 if (arglist == 0
6025 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6026 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6027 || TREE_CHAIN (arglist) == 0
6028 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6029 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6030 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6031 return const0_rtx;
6032 else if (!HAVE_cmpstrsi)
6033 break;
6034 {
6035 tree arg1 = TREE_VALUE (arglist);
6036 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6037 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6038 rtx result;
6039
6040 int arg1_align
6041 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6042 int arg2_align
6043 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6044 enum machine_mode insn_mode
6045 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6046
6047 /* If we don't have POINTER_TYPE, call the function. */
6048 if (arg1_align == 0 || arg2_align == 0)
6049 {
6050 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6051 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6052 break;
6053 }
6054
6055 /* Make a place to write the result of the instruction. */
6056 result = target;
6057 if (! (result != 0
6058 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6059 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6060 result = gen_reg_rtx (insn_mode);
6061
6062 emit_insn (gen_cmpstrsi (result,
6063 gen_rtx (MEM, BLKmode,
6064 expand_expr (arg1, NULL_RTX, Pmode,
6065 EXPAND_NORMAL)),
6066 gen_rtx (MEM, BLKmode,
6067 expand_expr (arg2, NULL_RTX, Pmode,
6068 EXPAND_NORMAL)),
6069 expand_expr (len, NULL_RTX, VOIDmode, 0),
6070 GEN_INT (MIN (arg1_align, arg2_align))));
6071
6072 /* Return the value in the proper mode for this function. */
6073 mode = TYPE_MODE (TREE_TYPE (exp));
6074 if (GET_MODE (result) == mode)
6075 return result;
6076 else if (target != 0)
6077 {
6078 convert_move (target, result, 0);
6079 return target;
6080 }
6081 else
6082 return convert_to_mode (mode, result, 0);
6083 }
6084#else
6085 case BUILT_IN_STRCMP:
6086 case BUILT_IN_MEMCMP:
6087 break;
6088#endif
6089
6090 default: /* just do library call, if unknown builtin */
6091 error ("built-in function %s not currently supported",
6092 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6093 }
6094
6095 /* The switch statement above can drop through to cause the function
6096 to be called normally. */
6097
6098 return expand_call (exp, target, ignore);
6099}
6100\f
6101 /* Expand code for a post- or pre-increment or decrement
6102 and return the RTX for the result.
6103 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6104
6105static rtx
6106expand_increment (exp, post)
6107 register tree exp;
6108 int post;
6109{
6110 register rtx op0, op1;
6111 register rtx temp, value;
6112 register tree incremented = TREE_OPERAND (exp, 0);
6113 optab this_optab = add_optab;
6114 int icode;
6115 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6116 int op0_is_copy = 0;
6117
6118 /* Stabilize any component ref that might need to be
6119 evaluated more than once below. */
6120 if (TREE_CODE (incremented) == BIT_FIELD_REF
6121 || (TREE_CODE (incremented) == COMPONENT_REF
6122 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6123 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6124 incremented = stabilize_reference (incremented);
6125
6126 /* Compute the operands as RTX.
6127 Note whether OP0 is the actual lvalue or a copy of it:
6128 I believe it is a copy iff it is a register or subreg
6129 and insns were generated in computing it. */
6130
6131 temp = get_last_insn ();
6132 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6133
6134 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6135 in place but instead must do sign- or zero-extension during assignment,
6136 so we copy it into a new register and let the code below use it as
6137 a copy.
6138
6139 Note that we can safely modify this SUBREG since it is known not to be
6140 shared (it was made by the expand_expr call above). */
6141
6142 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6143 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6144
6145 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6146 && temp != get_last_insn ());
6147 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6148
6149 /* Decide whether incrementing or decrementing. */
6150 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6151 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6152 this_optab = sub_optab;
6153
6154 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6155 then we cannot just increment OP0. We must
6156 therefore contrive to increment the original value.
6157 Then we can return OP0 since it is a copy of the old value. */
6158 if (op0_is_copy)
6159 {
6160 /* This is the easiest way to increment the value wherever it is.
6161 Problems with multiple evaluation of INCREMENTED
6162 are prevented because either (1) it is a component_ref,
6163 in which case it was stabilized above, or (2) it is an array_ref
6164 with constant index in an array in a register, which is
6165 safe to reevaluate. */
6166 tree newexp = build ((this_optab == add_optab
6167 ? PLUS_EXPR : MINUS_EXPR),
6168 TREE_TYPE (exp),
6169 incremented,
6170 TREE_OPERAND (exp, 1));
6171 temp = expand_assignment (incremented, newexp, ! post, 0);
6172 return post ? op0 : temp;
6173 }
6174
6175 /* Convert decrement by a constant into a negative increment. */
6176 if (this_optab == sub_optab
6177 && GET_CODE (op1) == CONST_INT)
6178 {
6179 op1 = GEN_INT (- INTVAL (op1));
6180 this_optab = add_optab;
6181 }
6182
6183 if (post)
6184 {
6185 /* We have a true reference to the value in OP0.
6186 If there is an insn to add or subtract in this mode, queue it. */
6187
6188#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6189 op0 = stabilize (op0);
6190#endif
6191
6192 icode = (int) this_optab->handlers[(int) mode].insn_code;
6193 if (icode != (int) CODE_FOR_nothing
6194 /* Make sure that OP0 is valid for operands 0 and 1
6195 of the insn we want to queue. */
6196 && (*insn_operand_predicate[icode][0]) (op0, mode)
6197 && (*insn_operand_predicate[icode][1]) (op0, mode))
6198 {
6199 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6200 op1 = force_reg (mode, op1);
6201
6202 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6203 }
6204 }
6205
6206 /* Preincrement, or we can't increment with one simple insn. */
6207 if (post)
6208 /* Save a copy of the value before inc or dec, to return it later. */
6209 temp = value = copy_to_reg (op0);
6210 else
6211 /* Arrange to return the incremented value. */
6212 /* Copy the rtx because expand_binop will protect from the queue,
6213 and the results of that would be invalid for us to return
6214 if our caller does emit_queue before using our result. */
6215 temp = copy_rtx (value = op0);
6216
6217 /* Increment however we can. */
6218 op1 = expand_binop (mode, this_optab, value, op1, op0,
6219 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6220 /* Make sure the value is stored into OP0. */
6221 if (op1 != op0)
6222 emit_move_insn (op0, op1);
6223
6224 return temp;
6225}
6226\f
6227/* Expand all function calls contained within EXP, innermost ones first.
6228 But don't look within expressions that have sequence points.
6229 For each CALL_EXPR, record the rtx for its value
6230 in the CALL_EXPR_RTL field. */
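/* Illustrative example (not in the original source): for
   `x = f (y) + g (z)', both calls are expanded up front and their
   result rtx recorded, so no partially computed operand of the
   addition has to survive across a call.  */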
6231
6232static void
6233preexpand_calls (exp)
6234 tree exp;
6235{
6236 register int nops, i;
6237 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6238
6239 if (! do_preexpand_calls)
6240 return;
6241
6242 /* Only expressions and references can contain calls. */
6243
6244 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6245 return;
6246
6247 switch (TREE_CODE (exp))
6248 {
6249 case CALL_EXPR:
6250 /* Do nothing if already expanded. */
6251 if (CALL_EXPR_RTL (exp) != 0)
6252 return;
6253
6254 /* Do nothing to built-in functions. */
6255 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6256 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6257 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6258 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6259 return;
6260
6261 case COMPOUND_EXPR:
6262 case COND_EXPR:
6263 case TRUTH_ANDIF_EXPR:
6264 case TRUTH_ORIF_EXPR:
6265 /* If we find one of these, then we can be sure
6266 the adjust will be done for it (since it makes jumps).
6267 Do it now, so that if this is inside an argument
6268 of a function, we don't get the stack adjustment
6269 after some other args have already been pushed. */
6270 do_pending_stack_adjust ();
6271 return;
6272
6273 case BLOCK:
6274 case RTL_EXPR:
6275 case WITH_CLEANUP_EXPR:
6276 return;
6277
6278 case SAVE_EXPR:
6279 if (SAVE_EXPR_RTL (exp) != 0)
6280 return;
6281 }
6282
6283 nops = tree_code_length[(int) TREE_CODE (exp)];
6284 for (i = 0; i < nops; i++)
6285 if (TREE_OPERAND (exp, i) != 0)
6286 {
6287 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6288 if (type == 'e' || type == '<' || type == '1' || type == '2'
6289 || type == 'r')
6290 preexpand_calls (TREE_OPERAND (exp, i));
6291 }
6292}
6293\f
6294/* At the start of a function, record that we have no previously-pushed
6295 arguments waiting to be popped. */
6296
6297void
6298init_pending_stack_adjust ()
6299{
6300 pending_stack_adjust = 0;
6301}
6302
6303/* When exiting from function, if safe, clear out any pending stack adjust
6304 so the adjustment won't get done. */
6305
6306void
6307clear_pending_stack_adjust ()
6308{
6309#ifdef EXIT_IGNORE_STACK
6310 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6311 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6312 && ! flag_inline_functions)
6313 pending_stack_adjust = 0;
6314#endif
6315}
6316
6317/* Pop any previously-pushed arguments that have not been popped yet. */
6318
6319void
6320do_pending_stack_adjust ()
6321{
6322 if (inhibit_defer_pop == 0)
6323 {
6324 if (pending_stack_adjust != 0)
6325 adjust_stack (GEN_INT (pending_stack_adjust));
6326 pending_stack_adjust = 0;
6327 }
6328}
6329
6330/* Expand all cleanups up to OLD_CLEANUPS.
6331 Needed here, and also for language-dependent calls. */
6332
6333void
6334expand_cleanups_to (old_cleanups)
6335 tree old_cleanups;
6336{
6337 while (cleanups_this_call != old_cleanups)
6338 {
6339 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6340 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6341 }
6342}
6343\f
6344/* Expand conditional expressions. */
6345
6346/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6347 LABEL is an rtx of code CODE_LABEL, in this function and all the
6348 functions here. */
6349
6350void
6351jumpifnot (exp, label)
6352 tree exp;
6353 rtx label;
6354{
6355 do_jump (exp, label, NULL_RTX);
6356}
6357
6358/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6359
6360void
6361jumpif (exp, label)
6362 tree exp;
6363 rtx label;
6364{
6365 do_jump (exp, NULL_RTX, label);
6366}
6367
6368/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6369 the result is zero, or IF_TRUE_LABEL if the result is one.
6370 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6371 meaning fall through in that case.
6372
6373 do_jump always does any pending stack adjust except when it does not
6374 actually perform a jump. An example where there is no jump
6375 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6376
6377 This function is responsible for optimizing cases such as
6378 &&, || and comparison operators in EXP. */
6379
6380void
6381do_jump (exp, if_false_label, if_true_label)
6382 tree exp;
6383 rtx if_false_label, if_true_label;
6384{
6385 register enum tree_code code = TREE_CODE (exp);
6386 /* Some cases need to create a label to jump to
6387 in order to properly fall through.
6388 These cases set DROP_THROUGH_LABEL nonzero. */
6389 rtx drop_through_label = 0;
6390 rtx temp;
6391 rtx comparison = 0;
6392 int i;
6393 tree type;
6394
6395 emit_queue ();
6396
6397 switch (code)
6398 {
6399 case ERROR_MARK:
6400 break;
6401
6402 case INTEGER_CST:
6403 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6404 if (temp)
6405 emit_jump (temp);
6406 break;
6407
6408#if 0
6409 /* This is not true with #pragma weak */
6410 case ADDR_EXPR:
6411 /* The address of something can never be zero. */
6412 if (if_true_label)
6413 emit_jump (if_true_label);
6414 break;
6415#endif
6416
6417 case NOP_EXPR:
6418 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6419 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6420 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6421 goto normal;
6422 case CONVERT_EXPR:
6423 /* If we are narrowing the operand, we have to do the compare in the
6424 narrower mode. */
6425 if ((TYPE_PRECISION (TREE_TYPE (exp))
6426 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6427 goto normal;
6428 case NON_LVALUE_EXPR:
6429 case REFERENCE_EXPR:
6430 case ABS_EXPR:
6431 case NEGATE_EXPR:
6432 case LROTATE_EXPR:
6433 case RROTATE_EXPR:
6434 /* These cannot change zero->non-zero or vice versa. */
6435 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6436 break;
6437
6438#if 0
6439 /* This is never less insns than evaluating the PLUS_EXPR followed by
6440 a test and can be longer if the test is eliminated. */
6441 case PLUS_EXPR:
6442 /* Reduce to minus. */
6443 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6444 TREE_OPERAND (exp, 0),
6445 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6446 TREE_OPERAND (exp, 1))));
6447 /* Process as MINUS. */
6448#endif
6449
6450 case MINUS_EXPR:
6451 /* Non-zero iff operands of minus differ. */
6452 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6453 TREE_OPERAND (exp, 0),
6454 TREE_OPERAND (exp, 1)),
6455 NE, NE);
6456 break;
6457
6458 case BIT_AND_EXPR:
6459 /* If we are AND'ing with a small constant, do this comparison in the
6460 smallest type that fits. If the machine doesn't have comparisons
6461 that small, it will be converted back to the wider comparison.
6462 This helps if we are testing the sign bit of a narrower object.
6463 combine can't do this for us because it can't know whether a
6464 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
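/* Illustrative example (not in the original source): for
   `if (c & 0x80)' where `c' is an int, the test is done in an
   8-bit mode when one exists, so a byte compare can examine the
   sign bit of the low byte directly.  */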
6465
6466 if (! SLOW_BYTE_ACCESS
6467 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6468 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6469 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6470 && (type = type_for_size (i + 1, 1)) != 0
6471 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6472 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6473 != CODE_FOR_nothing))
6474 {
6475 do_jump (convert (type, exp), if_false_label, if_true_label);
6476 break;
6477 }
6478 goto normal;
6479
6480 case TRUTH_NOT_EXPR:
6481 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6482 break;
6483
6484 case TRUTH_ANDIF_EXPR:
6485 if (if_false_label == 0)
6486 if_false_label = drop_through_label = gen_label_rtx ();
6487 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6488 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6489 break;
6490
6491 case TRUTH_ORIF_EXPR:
6492 if (if_true_label == 0)
6493 if_true_label = drop_through_label = gen_label_rtx ();
6494 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6495 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6496 break;
6497
6498 case COMPOUND_EXPR:
6499 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6500 free_temp_slots ();
6501 emit_queue ();
6502 do_pending_stack_adjust ();
6503 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6504 break;
6505
6506 case COMPONENT_REF:
6507 case BIT_FIELD_REF:
6508 case ARRAY_REF:
6509 {
6510 int bitsize, bitpos, unsignedp;
6511 enum machine_mode mode;
6512 tree type;
6513 tree offset;
6514 int volatilep = 0;
6515
6516 /* Get description of this reference. We don't actually care
6517 about the underlying object here. */
6518 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6519 &mode, &unsignedp, &volatilep);
6520
6521 type = type_for_size (bitsize, unsignedp);
6522 if (! SLOW_BYTE_ACCESS
6523 && type != 0 && bitsize >= 0
6524 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6525 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6526 != CODE_FOR_nothing))
6527 {
6528 do_jump (convert (type, exp), if_false_label, if_true_label);
6529 break;
6530 }
6531 goto normal;
6532 }
6533
6534 case COND_EXPR:
6535 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6536 if (integer_onep (TREE_OPERAND (exp, 1))
6537 && integer_zerop (TREE_OPERAND (exp, 2)))
6538 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6539
6540 else if (integer_zerop (TREE_OPERAND (exp, 1))
6541 && integer_onep (TREE_OPERAND (exp, 2)))
6542 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6543
6544 else
6545 {
6546 register rtx label1 = gen_label_rtx ();
6547 drop_through_label = gen_label_rtx ();
6548 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6549 /* Now the THEN-expression. */
6550 do_jump (TREE_OPERAND (exp, 1),
6551 if_false_label ? if_false_label : drop_through_label,
6552 if_true_label ? if_true_label : drop_through_label);
6553 /* In case the do_jump just above never jumps. */
6554 do_pending_stack_adjust ();
6555 emit_label (label1);
6556 /* Now the ELSE-expression. */
6557 do_jump (TREE_OPERAND (exp, 2),
6558 if_false_label ? if_false_label : drop_through_label,
6559 if_true_label ? if_true_label : drop_through_label);
6560 }
6561 break;
6562
6563 case EQ_EXPR:
6564 if (integer_zerop (TREE_OPERAND (exp, 1)))
6565 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6566 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6567 == MODE_INT)
6568 &&
6569 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6570 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6571 else
6572 comparison = compare (exp, EQ, EQ);
6573 break;
6574
6575 case NE_EXPR:
6576 if (integer_zerop (TREE_OPERAND (exp, 1)))
6577 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6578 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6579 == MODE_INT)
6580 &&
6581 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6582 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6583 else
6584 comparison = compare (exp, NE, NE);
6585 break;
6586
6587 case LT_EXPR:
6588 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6589 == MODE_INT)
6590 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6591 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6592 else
6593 comparison = compare (exp, LT, LTU);
6594 break;
6595
6596 case LE_EXPR:
6597 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6598 == MODE_INT)
6599 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6600 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6601 else
6602 comparison = compare (exp, LE, LEU);
6603 break;
6604
6605 case GT_EXPR:
6606 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6607 == MODE_INT)
6608 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6609 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6610 else
6611 comparison = compare (exp, GT, GTU);
6612 break;
6613
6614 case GE_EXPR:
6615 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6616 == MODE_INT)
6617 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6618 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6619 else
6620 comparison = compare (exp, GE, GEU);
6621 break;
6622
6623 default:
6624 normal:
6625 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6626#if 0
6627 /* This is not needed any more and causes poor code since it causes
6628 comparisons and tests from non-SI objects to have different code
6629 sequences. */
6630 /* Copy to register to avoid generating bad insns by cse
6631 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6632 if (!cse_not_expected && GET_CODE (temp) == MEM)
6633 temp = copy_to_reg (temp);
6634#endif
6635 do_pending_stack_adjust ();
6636 if (GET_CODE (temp) == CONST_INT)
6637 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6638 else if (GET_CODE (temp) == LABEL_REF)
6639 comparison = const_true_rtx;
6640 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6641 && !can_compare_p (GET_MODE (temp)))
6642 /* Note swapping the labels gives us not-equal. */
6643 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6644 else if (GET_MODE (temp) != VOIDmode)
6645 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6646 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6647 GET_MODE (temp), NULL_RTX, 0);
6648 else
6649 abort ();
6650 }
6651
6652 /* Do any postincrements in the expression that was tested. */
6653 emit_queue ();
6654
6655 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6656 straight into a conditional jump instruction as the jump condition.
6657 Otherwise, all the work has been done already. */
6658
6659 if (comparison == const_true_rtx)
6660 {
6661 if (if_true_label)
6662 emit_jump (if_true_label);
6663 }
6664 else if (comparison == const0_rtx)
6665 {
6666 if (if_false_label)
6667 emit_jump (if_false_label);
6668 }
6669 else if (comparison)
6670 do_jump_for_compare (comparison, if_false_label, if_true_label);
6671
6672 free_temp_slots ();
6673
6674 if (drop_through_label)
6675 {
6676 /* If do_jump produces code that might be jumped around,
6677 do any stack adjusts from that code, before the place
6678 where control merges in. */
6679 do_pending_stack_adjust ();
6680 emit_label (drop_through_label);
6681 }
6682}
6683\f
6684/* Given a comparison expression EXP for values too wide to be compared
6685 with one insn, test the comparison and jump to the appropriate label.
6686 The code of EXP is ignored; we always test GT if SWAP is 0,
6687 and LT if SWAP is 1. */
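/* Illustrative example (not in the original source): comparing two
   DImode values on a 32-bit machine tests the high-order words first;
   only if those are equal do we go on to the low-order words, which
   must then be compared unsigned.  */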
6688
6689static void
6690do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6691 tree exp;
6692 int swap;
6693 rtx if_false_label, if_true_label;
6694{
6695 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6696 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6697 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6698 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6699 rtx drop_through_label = 0;
6700 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6701 int i;
6702
6703 if (! if_true_label || ! if_false_label)
6704 drop_through_label = gen_label_rtx ();
6705 if (! if_true_label)
6706 if_true_label = drop_through_label;
6707 if (! if_false_label)
6708 if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but the high-order word must be compared as unsigned:
         only the high-order word carries the sign bit; the lower
         words are pure magnitude.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */
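
/* The values are equal exactly when every corresponding pair of words is
   equal, so the loop below jumps to the false label as soon as some pair
   differs and reaches the true label only if no pair did.  */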

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      /* A pair of words known unequal settles the whole comparison;
         a pair known equal tells us nothing, so emit no jump for it.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */
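
/* For example, a two-word OP0 is zero exactly when both of its words are
   zero, so each word is compared against zero in turn and any word known
   or found to be nonzero sends control to the false label.  */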

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* A word known nonzero settles the whole test; a word known zero
         tells us nothing, so emit no jump for it.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */
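
/* When only IF_FALSE_LABEL is given, we would rather branch on the
   comparison being false.  For targets whose emitted branch cannot simply
   be inverted, the fallback below rewrites it schematically as

        bCC around              (the branch as first emitted, redirected)
        jump if_false_label
     around:

   where "around" is a fresh label generated here.  */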

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the insn before what was just emitted.
         On some machines, emitting the branch can discard
         the previous compare insn and emit a replacement.  */
      if (prev == 0)
        /* If there's only one preceding insn...  */
        insn = get_insns ();
      else
        insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  /* If one operand is constant, make it the second one.  */

  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return simplify_relational_operation (code, mode, op0, op1);

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
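
/* The set/jump/set fallback at the end of this function amounts, in sketch
   form, to

        target = 1;             (0 if the result is to be inverted)
        bCC done                (branch if the comparison holds)
        target = 0;             (1 if the result is to be inverted)
     done:

   with the branch generated through bcc_gen_fctn.  */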

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
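
  /* For example, a signed "x < 1" is handled below as "x <= 0", and a
     signed "x > -1" as "x >= 0".  */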

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
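
  /* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0"
     becomes "((x >> 3) & 1) ^ 1".  */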

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                                    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
                            OPTAB_LIB_WIDEN);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
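
  /* Suppose the case labels run from 5 to 9: the caller has already
     emitted INDEX = x - 5.  For an original x of 3, INDEX wraps around
     to a huge unsigned value, so the single unsigned comparison against
     RANGE below rejects values on either side of the table.  */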

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
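
  /* What follows forms the address TABLE_LABEL + INDEX * (size of one
     vector entry), i.e. an ordinary array index into the dispatch table,
     and then loads the target label out of the table.  */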
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
              gen_rtx (MULT, Pmode, index,
                       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
              gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */