/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED  /* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
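
/* Illustrative arithmetic (a sketch, not used by the compiler itself):
   with MOVE_RATIO 15 on a target whose word is 4 bytes, copying a
   32-byte word-aligned block costs 32/4 = 8 move insns; 8 < 15, so
   emit_block_move below expands the copy inline via move_by_pieces.
   A 256-byte block (64 insns) instead goes through a movstr pattern
   or a library call.  */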

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* Find a register that can be used in this mode, if any.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (HARD_REGNO_MODE_OK (regno, mode))
          break;

      if (regno == FIRST_PSEUDO_REGISTER)
        continue;

      reg = gen_rtx (REG, mode, regno);

      SET_SRC (pat) = mem;
      SET_DEST (pat) = reg;
      direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;

      SET_SRC (pat) = reg;
      SET_DEST (pat) = mem;
      direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

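/* Illustrative sketch (hypothetical caller; the real callers are the
   increment-expansion routines later in this file): to queue `R += 1'
   on a target with an add2 insn, one might write

     rtx q = enqueue_insn (r, gen_add2_insn (r, const1_rtx));

   Until emit_queue fires, protect_from_queue (q, 0) returns R itself;
   afterward it returns a temporary that the QUEUED_INSN/QUEUED_COPY
   bookkeeping loaded with the pre-increment value of R.  */
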
static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */  /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* For extension, try a direct insn if one exists; failing that,
     search for an intermediate mode to convert via.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

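/* Example (an illustrative sketch): to sign-extend an HImode register
   HIREG into an SImode register SIREG, a caller writes

     convert_move (sireg, hireg, 0);

   which emits the target's extendhisi2-style insn when one exists;
   passing 1 for UNSIGNEDP requests zero extension instead.  */
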
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      && direct_load[(int) mode]
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
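
/* Example (grounded in emit_block_move below): a size rtx is coerced
   to the pointer mode for a libcall argument with

     size = convert_to_mode (Pmode, size, 1);

   When SIZE is already Pmode, or gen_lowpart can reinterpret it in
   place, no insns are emitted; otherwise convert_move copies the value
   through a fresh pseudo register.  */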
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

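/* Worked example (a sketch, assuming MOVE_MAX == 4 and mov patterns for
   QImode, HImode and SImode): move_by_pieces_ninsns (11, 4) counts
   2 SImode moves (8 bytes), then 1 HImode move (2 bytes), then 1 QImode
   move, and returns 4.  With align == 1 on a STRICT_ALIGNMENT machine,
   only QImode qualifies, so all 11 bytes go as byte moves and the
   result is 11.  */
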
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2 = size;
              rtx last = get_last_insn ();
              rtx pat;

              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
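
/* Example (an illustrative sketch): to copy a 16-byte BLKmode aggregate
   from MEM rtx SRC to MEM rtx DST, both known to be word-aligned, a
   caller writes

     emit_block_move (dst, src, GEN_INT (16), UNITS_PER_WORD);

   A constant size this small is normally expanded inline; a variable or
   large SIZE falls through to a movstr pattern or the memcpy/bcopy
   library call at the end of the function.  */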
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 1,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

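/* Example (mirrors the code above): zeroing a 24-byte BLKmode object
   OBJ becomes a memset (or bzero) library call:

     clear_storage (obj, 24);

   A non-BLKmode object instead gets a single move of const0_rtx.  */
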
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      return last_insn;
    }
  else
    abort ();
}
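
/* Example (an illustrative sketch): copying one SImode pseudo into
   another is simply

     last = emit_move_insn (dst_reg, src_reg);

   Y may also be a VOIDmode constant, e.g. GEN_INT (42); a constant that
   is not LEGITIMATE_CONSTANT_P is first forced into the constant pool
   by force_const_mem.  */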
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

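/* Example (illustrative): where the stack grows downward,
   STACK_PUSH_CODE defaults to PRE_DEC (see the top of this file), so

     gen_rtx (MEM, SImode, gen_push_operand ())

   builds (mem:SI (pre_dec (reg sp))); storing into it decrements the
   stack pointer and writes the value, i.e. a push.  */
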
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 1,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 1,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
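
/* Example (an illustrative sketch of a caller such as expand_call):
   pushing a plain SImode value X entirely on the stack, with no
   padding and no register part, looks like

     emit_push_insn (x, SImode, NULL_TREE, NULL_RTX, align, 0,
                     NULL_RTX, 0, args_addr, args_so_far);

   With PARTIAL == 0 and no BLKmode size, the final else arm above
   emits a single push or an indexed store into the argument block.  */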
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */

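/* Example (taken from clear_storage above): a three-argument memset
   libcall is written

     emit_library_call (memset_libfunc, 1, VOIDmode, 3,
                        XEXP (object, 0), Pmode, const0_rtx, Pmode,
                        GEN_INT (size), Pmode);

   i.e. FUN, NO_QUEUE, OUTMODE and NARGS, followed by NARGS pairs of
   (rtx value, machine mode).  */
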
void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree) 0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
1874 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1875 val = convert_to_mode (DFmode, val), mode = DFmode;
1876#endif
1877
1878 /* There's no need to call protect_from_queue, because
1879 either emit_move_insn or emit_push_insn will do that. */
1880
1881 /* Make sure it is a reasonable operand for a move or push insn. */
1882 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1883 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1884 val = force_operand (val, NULL_RTX);
1885
1886 argvec[count].value = val;
1887 argvec[count].mode = mode;
1888
1889#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1890 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1891 abort ();
1892#endif
1893
1894 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1895 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1896 abort ();
1897#ifdef FUNCTION_ARG_PARTIAL_NREGS
1898 argvec[count].partial
1899 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1900#else
1901 argvec[count].partial = 0;
1902#endif
1903
1904 locate_and_pad_parm (mode, NULL_TREE,
1905 argvec[count].reg && argvec[count].partial == 0,
1906 NULL_TREE, &args_size, &argvec[count].offset,
1907 &argvec[count].size);
1908
1909 if (argvec[count].size.var)
1910 abort ();
1911
1912#ifndef REG_PARM_STACK_SPACE
1913 if (argvec[count].partial)
1914 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1915#endif
1916
1917 if (argvec[count].reg == 0 || argvec[count].partial != 0
1918#ifdef REG_PARM_STACK_SPACE
1919 || 1
1920#endif
1921 )
1922 args_size.constant += argvec[count].size.constant;
1923
1924#ifdef ACCUMULATE_OUTGOING_ARGS
1925 /* If this arg is actually passed on the stack, it might be
1926 clobbering something we already put there (this library call might
1927 be inside the evaluation of an argument to a function whose call
1928 requires the stack). This will only occur when the library call
1929 has sufficient args to run out of argument registers. Abort in
1930 this case; if this ever occurs, code must be added to save and
1931 restore the arg slot. */
1932
1933 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1934 abort ();
1935#endif
1936
1937 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1938 }
1939 va_end (p);
1940
1941 /* If this machine requires an external definition for library
1942 functions, write one out. */
1943 assemble_external_libcall (fun);
1944
1945#ifdef STACK_BOUNDARY
1946 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1947 / STACK_BYTES) * STACK_BYTES);
1948#endif
1949
1950#ifdef REG_PARM_STACK_SPACE
1951 args_size.constant = MAX (args_size.constant,
1952 REG_PARM_STACK_SPACE ((tree) 0));
1953#endif
1954
1955#ifdef ACCUMULATE_OUTGOING_ARGS
1956 if (args_size.constant > current_function_outgoing_args_size)
1957 current_function_outgoing_args_size = args_size.constant;
1958 args_size.constant = 0;
1959#endif
1960
1961#ifndef PUSH_ROUNDING
1962 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
1963#endif
1964
1965#ifdef PUSH_ARGS_REVERSED
1966 inc = -1;
1967 argnum = nargs - 1;
1968#else
1969 inc = 1;
1970 argnum = 0;
1971#endif
1972
1973 /* Push the args that need to be pushed. */
1974
1975 for (count = 0; count < nargs; count++, argnum += inc)
1976 {
1977 register enum machine_mode mode = argvec[argnum].mode;
1978 register rtx val = argvec[argnum].value;
1979 rtx reg = argvec[argnum].reg;
1980 int partial = argvec[argnum].partial;
1981
1982 if (! (reg != 0 && partial == 0))
1983 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
1984 argblock, GEN_INT (argvec[count].offset.constant));
1985 NO_DEFER_POP;
1986 }
1987
1988#ifdef PUSH_ARGS_REVERSED
1989 argnum = nargs - 1;
1990#else
1991 argnum = 0;
1992#endif
1993
1994 /* Now load any reg parms into their regs. */
1995
1996 for (count = 0; count < nargs; count++, argnum += inc)
1997 {
1998 register enum machine_mode mode = argvec[argnum].mode;
1999 register rtx val = argvec[argnum].value;
2000 rtx reg = argvec[argnum].reg;
2001 int partial = argvec[argnum].partial;
2002
2003 if (reg != 0 && partial == 0)
2004 emit_move_insn (reg, val);
2005 NO_DEFER_POP;
2006 }
2007
2008 /* For version 1.37, try deleting this entirely. */
2009 if (! no_queue)
2010 emit_queue ();
2011
2012 /* Any regs containing parms remain in use through the call. */
2013 start_sequence ();
2014 for (count = 0; count < nargs; count++)
2015 if (argvec[count].reg != 0)
2016 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2017
2018 use_insns = get_insns ();
2019 end_sequence ();
2020
2021 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2022
2023 /* Don't allow popping to be deferred, since then
2024 cse'ing of library calls could delete a call and leave the pop. */
2025 NO_DEFER_POP;
2026
2027 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2028 will set inhibit_defer_pop to that value. */
2029
2030 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2031 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2032 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2033 old_inhibit_defer_pop + 1, use_insns, no_queue);
2034
2035 /* Now restore inhibit_defer_pop to its actual original value. */
2036 OK_DEFER_POP;
2037}
2038\f
2039/* Expand an assignment that stores the value of FROM into TO.
2040 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2041 (This may contain a QUEUED rtx.)
2042 Otherwise, the returned value is not meaningful.
2043
2044 SUGGEST_REG is no longer actually used.
2045 It used to mean, copy the value through a register
2046 and return that register, if that is possible.
2047 But now we do this if WANT_VALUE.
2048
2049 If the value stored is a constant, we return the constant. */
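
/* For example, a C assignment such as `s.f = x' arrives here with TO a
 COMPONENT_REF for `s.f' and FROM the tree for `x'; the code below then
 stores through store_field rather than a plain move. (An illustrative
 sketch; the exact trees depend on the front end.) */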
2050
2051rtx
2052expand_assignment (to, from, want_value, suggest_reg)
2053 tree to, from;
2054 int want_value;
2055 int suggest_reg;
2056{
2057 register rtx to_rtx = 0;
2058 rtx result;
2059
2060 /* Don't crash if the lhs of the assignment was erroneous. */
2061
2062 if (TREE_CODE (to) == ERROR_MARK)
2063 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2064
2065 /* Assignment of a structure component needs special treatment
2066 if the structure component's rtx is not simply a MEM.
2067 Assignment of an array element at a constant index
2068 has the same problem. */
2069
2070 if (TREE_CODE (to) == COMPONENT_REF
2071 || TREE_CODE (to) == BIT_FIELD_REF
2072 || (TREE_CODE (to) == ARRAY_REF
2073 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2074 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2075 {
2076 enum machine_mode mode1;
2077 int bitsize;
2078 int bitpos;
2079 tree offset;
2080 int unsignedp;
2081 int volatilep = 0;
2082 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2083 &mode1, &unsignedp, &volatilep);
2084
2085 /* If we are going to use store_bit_field and extract_bit_field,
2086 make sure to_rtx will be safe for multiple use. */
2087
2088 if (mode1 == VOIDmode && want_value)
2089 tem = stabilize_reference (tem);
2090
2091 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2092 if (offset != 0)
2093 {
2094 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2095
2096 if (GET_CODE (to_rtx) != MEM)
2097 abort ();
2098 to_rtx = change_address (to_rtx, VOIDmode,
2099 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2100 force_reg (Pmode, offset_rtx)));
2101 }
2102 if (volatilep)
2103 {
2104 if (GET_CODE (to_rtx) == MEM)
2105 MEM_VOLATILE_P (to_rtx) = 1;
2106#if 0 /* This was turned off because, when a field is volatile
2107 in an object which is not volatile, the object may be in a register,
2108 and then we would abort over here. */
2109 else
2110 abort ();
2111#endif
2112 }
2113
2114 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2115 (want_value
2116 /* Spurious cast makes HPUX compiler happy. */
2117 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2118 : VOIDmode),
2119 unsignedp,
2120 /* Required alignment of containing datum. */
2121 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2122 int_size_in_bytes (TREE_TYPE (tem)));
2123 preserve_temp_slots (result);
2124 free_temp_slots ();
2125
2126 return result;
2127 }
2128
2129 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2130 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2131
2132 if (to_rtx == 0)
2133 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2134
2135 /* In case we are returning the contents of an object which overlaps
2136 the place the value is being stored, use a safe function when copying
2137 a value through a pointer into a structure value return block. */
2138 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2139 && current_function_returns_struct
2140 && !current_function_returns_pcc_struct)
2141 {
2142 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2143 rtx size = expr_size (from);
2144
2145#ifdef TARGET_MEM_FUNCTIONS
2146 emit_library_call (memcpy_libfunc, 1,
2147 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2148 XEXP (from_rtx, 0), Pmode,
2149 size, Pmode);
2150#else
2151 emit_library_call (bcopy_libfunc, 1,
2152 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2153 XEXP (to_rtx, 0), Pmode,
2154 size, Pmode);
2155#endif
2156
2157 preserve_temp_slots (to_rtx);
2158 free_temp_slots ();
2159 return to_rtx;
2160 }
2161
2162 /* Compute FROM and store the value in the rtx we got. */
2163
2164 result = store_expr (from, to_rtx, want_value);
2165 preserve_temp_slots (result);
2166 free_temp_slots ();
2167 return result;
2168}
2169
2170/* Generate code for computing expression EXP,
2171 and storing the value into TARGET.
2172 Returns TARGET or an equivalent value.
2173 TARGET may contain a QUEUED rtx.
2174
2175 If SUGGEST_REG is nonzero, copy the value through a register
2176 and return that register, if that is possible.
2177
2178 If the value stored is a constant, we return the constant. */
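
/* Typical use, as in expand_assignment above, once TO_RTX has been
 computed for the left-hand side:

 result = store_expr (from, to_rtx, want_value);

 A sketch only; FROM, TO_RTX and WANT_VALUE are the caller's values. */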
2179
2180rtx
2181store_expr (exp, target, suggest_reg)
2182 register tree exp;
2183 register rtx target;
2184 int suggest_reg;
2185{
2186 register rtx temp;
2187 int dont_return_target = 0;
2188
2189 if (TREE_CODE (exp) == COMPOUND_EXPR)
2190 {
2191 /* Perform first part of compound expression, then assign from second
2192 part. */
2193 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2194 emit_queue ();
2195 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2196 }
2197 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2198 {
2199 /* For conditional expression, get safe form of the target. Then
2200 test the condition, doing the appropriate assignment on either
2201 side. This avoids the creation of unnecessary temporaries.
2202 For non-BLKmode, it is more efficient not to do this. */
2203
2204 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2205
2206 emit_queue ();
2207 target = protect_from_queue (target, 1);
2208
2209 NO_DEFER_POP;
2210 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2211 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2212 emit_queue ();
2213 emit_jump_insn (gen_jump (lab2));
2214 emit_barrier ();
2215 emit_label (lab1);
2216 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2217 emit_queue ();
2218 emit_label (lab2);
2219 OK_DEFER_POP;
2220 return target;
2221 }
2222 else if (suggest_reg && GET_CODE (target) == MEM
2223 && GET_MODE (target) != BLKmode)
2224 /* If target is in memory and caller wants value in a register instead,
2225 arrange that. Pass TARGET as target for expand_expr so that,
2226 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2227 We know expand_expr will not use the target in that case. */
2228 {
2229 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2230 GET_MODE (target), 0);
2231 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2232 temp = copy_to_reg (temp);
2233 dont_return_target = 1;
2234 }
2235 else if (queued_subexp_p (target))
2236 /* If target contains a postincrement, it is not safe
2237 to use as the returned value. It would access the wrong
2238 place by the time the queued increment gets output.
2239 So copy the value through a temporary and use that temp
2240 as the result. */
2241 {
2242 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2243 {
2244 /* Expand EXP into a new pseudo. */
2245 temp = gen_reg_rtx (GET_MODE (target));
2246 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2247 }
2248 else
2249 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2250 dont_return_target = 1;
2251 }
2252 else
2253 {
2254 temp = expand_expr (exp, target, GET_MODE (target), 0);
2255 /* DO return TARGET if it's a specified hardware register.
2256 expand_return relies on this. */
2257 if (!(target && GET_CODE (target) == REG
2258 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2259 && CONSTANT_P (temp))
2260 dont_return_target = 1;
2261 }
2262
2263 /* If value was not generated in the target, store it there.
2264 Convert the value to TARGET's type first if necessary. */
2265
2266 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2267 {
2268 target = protect_from_queue (target, 1);
2269 if (GET_MODE (temp) != GET_MODE (target)
2270 && GET_MODE (temp) != VOIDmode)
2271 {
2272 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2273 if (dont_return_target)
2274 {
2275 /* In this case, we will return TEMP,
2276 so make sure it has the proper mode.
2277 But don't forget to store the value into TARGET. */
2278 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2279 emit_move_insn (target, temp);
2280 }
2281 else
2282 convert_move (target, temp, unsignedp);
2283 }
2284
2285 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2286 {
2287 /* Handle copying a string constant into an array.
2288 The string constant may be shorter than the array.
2289 So copy just the string's actual length, and clear the rest. */
2290 rtx size;
2291
2292 /* Get the size of the data type of the string,
2293 which is actually the size of the target. */
2294 size = expr_size (exp);
2295 if (GET_CODE (size) == CONST_INT
2296 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2297 emit_block_move (target, temp, size,
2298 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2299 else
2300 {
2301 /* Compute the size of the data to copy from the string. */
2302 tree copy_size
2303 = fold (build (MIN_EXPR, sizetype,
2304 size_binop (CEIL_DIV_EXPR,
2305 TYPE_SIZE (TREE_TYPE (exp)),
2306 size_int (BITS_PER_UNIT)),
2307 convert (sizetype,
2308 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2309 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2310 VOIDmode, 0);
2311 rtx label = 0;
2312
2313 /* Copy that much. */
2314 emit_block_move (target, temp, copy_size_rtx,
2315 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2316
2317 /* Figure out how much is left in TARGET
2318 that we have to clear. */
2319 if (GET_CODE (copy_size_rtx) == CONST_INT)
2320 {
2321 temp = plus_constant (XEXP (target, 0),
2322 TREE_STRING_LENGTH (exp));
2323 size = plus_constant (size,
2324 - TREE_STRING_LENGTH (exp));
2325 }
2326 else
2327 {
2328 enum machine_mode size_mode = Pmode;
2329
2330 temp = force_reg (Pmode, XEXP (target, 0));
2331 temp = expand_binop (size_mode, add_optab, temp,
2332 copy_size_rtx, NULL_RTX, 0,
2333 OPTAB_LIB_WIDEN);
2334
2335 size = expand_binop (size_mode, sub_optab, size,
2336 copy_size_rtx, NULL_RTX, 0,
2337 OPTAB_LIB_WIDEN);
2338
2339 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2340 GET_MODE (size), 0, 0);
2341 label = gen_label_rtx ();
2342 emit_jump_insn (gen_blt (label));
2343 }
2344
2345 if (size != const0_rtx)
2346 {
2347#ifdef TARGET_MEM_FUNCTIONS
2348 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2349 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2350#else
2351 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2352 temp, Pmode, size, Pmode);
2353#endif
2354 }
2355 if (label)
2356 emit_label (label);
2357 }
2358 }
2359 else if (GET_MODE (temp) == BLKmode)
2360 emit_block_move (target, temp, expr_size (exp),
2361 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2362 else
2363 emit_move_insn (target, temp);
2364 }
2365 if (dont_return_target)
2366 return temp;
2367 return target;
2368}
2369\f
2370/* Store the value of constructor EXP into the rtx TARGET.
2371 TARGET is either a REG or a MEM. */
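
/* For instance, a C initialization such as

 struct { int a, b; } x = { 1, 2 };

 reaches here with EXP the CONSTRUCTOR for `{ 1, 2 }'; each element is
 then stored into the matching field of TARGET via store_field below.
 (Illustrative only; the trees come from the front end.) */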
2372
2373static void
2374store_constructor (exp, target)
2375 tree exp;
2376 rtx target;
2377{
2378 tree type = TREE_TYPE (exp);
2379
2380 /* We know our target cannot conflict, since safe_from_p has been called. */
2381#if 0
2382 /* Don't try copying piece by piece into a hard register
2383 since that is vulnerable to being clobbered by EXP.
2384 Instead, construct in a pseudo register and then copy it all. */
2385 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2386 {
2387 rtx temp = gen_reg_rtx (GET_MODE (target));
2388 store_constructor (exp, temp);
2389 emit_move_insn (target, temp);
2390 return;
2391 }
2392#endif
2393
2394 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2395 {
2396 register tree elt;
2397
2398 /* Inform later passes that the whole union value is dead. */
2399 if (TREE_CODE (type) == UNION_TYPE)
2400 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2401
2402 /* If we are building a static constructor into a register,
2403 set the initial value as zero so we can fold the value into
2404 a constant. */
2405 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2406 emit_move_insn (target, const0_rtx);
2407
2408 /* If the constructor has fewer fields than the structure,
2409 clear the whole structure first. */
2410 else if (list_length (CONSTRUCTOR_ELTS (exp))
2411 != list_length (TYPE_FIELDS (type)))
2412 clear_storage (target, int_size_in_bytes (type));
2413 else
2414 /* Inform later passes that the old value is dead. */
2415 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2416
2417 /* Store each element of the constructor into
2418 the corresponding field of TARGET. */
2419
2420 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2421 {
2422 register tree field = TREE_PURPOSE (elt);
2423 register enum machine_mode mode;
2424 int bitsize;
2425 int bitpos;
2426 int unsignedp;
2427
2428 /* Just ignore missing fields.
2429 We cleared the whole structure, above,
2430 if any fields are missing. */
2431 if (field == 0)
2432 continue;
2433
2434 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2435 unsignedp = TREE_UNSIGNED (field);
2436 mode = DECL_MODE (field);
2437 if (DECL_BIT_FIELD (field))
2438 mode = VOIDmode;
2439
2440 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2441 /* ??? This case remains to be written. */
2442 abort ();
2443
2444 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2445
2446 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2447 /* The alignment of TARGET is
2448 at least what its type requires. */
2449 VOIDmode, 0,
2450 TYPE_ALIGN (type) / BITS_PER_UNIT,
2451 int_size_in_bytes (type));
2452 }
2453 }
2454 else if (TREE_CODE (type) == ARRAY_TYPE)
2455 {
2456 register tree elt;
2457 register int i;
2458 tree domain = TYPE_DOMAIN (type);
2459 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2460 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2461 tree elttype = TREE_TYPE (type);
2462
2463 /* If the constructor has fewer fields than the structure,
2464 clear the whole structure first. Similarly if this is a
2465 static constructor of a non-BLKmode object. */
2466
2467 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2468 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2469 clear_storage (target, maxelt - minelt + 1);
2470 else
2471 /* Inform later passes that the old value is dead. */
2472 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2473
2474 /* Store each element of the constructor into
2475 the corresponding element of TARGET, determined
2476 by counting the elements. */
2477 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2478 elt;
2479 elt = TREE_CHAIN (elt), i++)
2480 {
2481 register enum machine_mode mode;
2482 int bitsize;
2483 int bitpos;
2484 int unsignedp;
2485
2486 mode = TYPE_MODE (elttype);
2487 bitsize = GET_MODE_BITSIZE (mode);
2488 unsignedp = TREE_UNSIGNED (elttype);
2489
2490 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2491
2492 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2493 /* The alignment of TARGET is
2494 at least what its type requires. */
2495 VOIDmode, 0,
2496 TYPE_ALIGN (type) / BITS_PER_UNIT,
2497 int_size_in_bytes (type));
2498 }
2499 }
2500
2501 else
2502 abort ();
2503}
2504
2505/* Store the value of EXP (an expression tree)
2506 into a subfield of TARGET which has mode MODE and occupies
2507 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2508 If MODE is VOIDmode, it means that we are storing into a bit-field.
2509
2510 If VALUE_MODE is VOIDmode, return nothing in particular.
2511 UNSIGNEDP is not used in this case.
2512
2513 Otherwise, return an rtx for the value stored. This rtx
2514 has mode VALUE_MODE if that is convenient to do.
2515 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2516
2517 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2518 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
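
/* As an illustration, storing into a C bit-field such as

 struct { unsigned f : 3; } s; s.f = v;

 would arrive here with MODE == VOIDmode, BITSIZE == 3, and BITPOS the
 offset of `f' in bits from the start of `s'. (A sketch of the intent,
 not of the exact trees.) */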
2519
2520static rtx
2521store_field (target, bitsize, bitpos, mode, exp, value_mode,
2522 unsignedp, align, total_size)
2523 rtx target;
2524 int bitsize, bitpos;
2525 enum machine_mode mode;
2526 tree exp;
2527 enum machine_mode value_mode;
2528 int unsignedp;
2529 int align;
2530 int total_size;
2531{
2532 HOST_WIDE_INT width_mask = 0;
2533
2534 if (bitsize < HOST_BITS_PER_WIDE_INT)
2535 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2536
2537 /* If we are storing into an unaligned field of an aligned union that is
2538 in a register, we may have the mode of TARGET being an integer mode but
2539 MODE == BLKmode. In that case, get an aligned object whose size and
2540 alignment are the same as TARGET and store TARGET into it (we can avoid
2541 the store if the field being stored is the entire width of TARGET). Then
2542 call ourselves recursively to store the field into a BLKmode version of
2543 that object. Finally, load from the object into TARGET. This is not
2544 very efficient in general, but should only be slightly more expensive
2545 than the otherwise-required unaligned accesses. Perhaps this can be
2546 cleaned up later. */
2547
2548 if (mode == BLKmode
2549 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2550 {
2551 rtx object = assign_stack_temp (GET_MODE (target),
2552 GET_MODE_SIZE (GET_MODE (target)), 0);
2553 rtx blk_object = copy_rtx (object);
2554
2555 PUT_MODE (blk_object, BLKmode);
2556
2557 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2558 emit_move_insn (object, target);
2559
2560 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2561 align, total_size);
2562
2563 emit_move_insn (target, object);
2564
2565 return target;
2566 }
2567
2568 /* If the structure is in a register or if the component
2569 is a bit field, we cannot use addressing to access it.
2570 Use bit-field techniques or SUBREG to store in it. */
2571
2572 if (mode == VOIDmode
2573 || (mode != BLKmode && ! direct_store[(int) mode])
2574 || GET_CODE (target) == REG
2575 || GET_CODE (target) == SUBREG)
2576 {
2577 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2578 /* Store the value in the bitfield. */
2579 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2580 if (value_mode != VOIDmode)
2581 {
2582 /* The caller wants an rtx for the value. */
2583 /* If possible, avoid refetching from the bitfield itself. */
2584 if (width_mask != 0
2585 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2586 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2587 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2588 NULL_RTX, value_mode, 0, align,
2589 total_size);
2590 }
2591 return const0_rtx;
2592 }
2593 else
2594 {
2595 rtx addr = XEXP (target, 0);
2596 rtx to_rtx;
2597
2598 /* If a value is wanted, it must be the lhs;
2599 so make the address stable for multiple use. */
2600
2601 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2602 && ! CONSTANT_ADDRESS_P (addr)
2603 /* A frame-pointer reference is already stable. */
2604 && ! (GET_CODE (addr) == PLUS
2605 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2606 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2607 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2608 addr = copy_to_reg (addr);
2609
2610 /* Now build a reference to just the desired component. */
2611
2612 to_rtx = change_address (target, mode,
2613 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2614 MEM_IN_STRUCT_P (to_rtx) = 1;
2615
2616 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2617 }
2618}
2619\f
2620/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2621 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2622 ARRAY_REFs at constant positions and find the ultimate containing object,
2623 which we return.
2624
2625 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2626 bit position, and *PUNSIGNEDP to the signedness of the field.
2627 If the position of the field is variable, we store a tree
2628 giving the variable offset (in units) in *POFFSET.
2629 This offset is in addition to the bit position.
2630 If the position is not variable, we store 0 in *POFFSET.
2631
2632 If any of the extraction expressions is volatile,
2633 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2634
2635 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2636 is a mode that can be used to access the field. In that case, *PBITSIZE
2637 is redundant.
2638
2639 If the field describes a variable-sized object, *PMODE is set to
2640 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2641 this case, but the address of the object can be found. */
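
/* For example, for `a.b.c' with constant field positions, the loop
 below peels off `.c' and then `.b', accumulating each field's
 DECL_FIELD_BITPOS into *PBITPOS, and returns the tree for `a' with
 *POFFSET left 0. (Illustrative only.) */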
2642
2643tree
2644 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2645 tree exp;
2646 int *pbitsize;
2647 int *pbitpos;
2648 tree *poffset;
2649 enum machine_mode *pmode;
2650 int *punsignedp;
2651 int *pvolatilep;
2652{
2653 tree size_tree = 0;
2654 enum machine_mode mode = VOIDmode;
2655 tree offset = 0;
2656
2657 if (TREE_CODE (exp) == COMPONENT_REF)
2658 {
2659 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2660 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2661 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2662 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2663 }
2664 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2665 {
2666 size_tree = TREE_OPERAND (exp, 1);
2667 *punsignedp = TREE_UNSIGNED (exp);
2668 }
2669 else
2670 {
2671 mode = TYPE_MODE (TREE_TYPE (exp));
2672 *pbitsize = GET_MODE_BITSIZE (mode);
2673 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2674 }
2675
2676 if (size_tree)
2677 {
2678 if (TREE_CODE (size_tree) != INTEGER_CST)
2679 mode = BLKmode, *pbitsize = -1;
2680 else
2681 *pbitsize = TREE_INT_CST_LOW (size_tree);
2682 }
2683
2684 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2685 and find the ultimate containing object. */
2686
2687 *pbitpos = 0;
2688
2689 while (1)
2690 {
2691 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2692 {
2693 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2694 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2695 : TREE_OPERAND (exp, 2));
2696
2697 if (TREE_CODE (pos) == PLUS_EXPR)
2698 {
2699 tree constant, var;
2700 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2701 {
2702 constant = TREE_OPERAND (pos, 0);
2703 var = TREE_OPERAND (pos, 1);
2704 }
2705 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2706 {
2707 constant = TREE_OPERAND (pos, 1);
2708 var = TREE_OPERAND (pos, 0);
2709 }
2710 else
2711 abort ();
2712 *pbitpos += TREE_INT_CST_LOW (constant);
2713 if (offset)
2714 offset = size_binop (PLUS_EXPR, offset,
2715 size_binop (FLOOR_DIV_EXPR, var,
2716 size_int (BITS_PER_UNIT)));
2717 else
2718 offset = size_binop (FLOOR_DIV_EXPR, var,
2719 size_int (BITS_PER_UNIT));
2720 }
2721 else if (TREE_CODE (pos) == INTEGER_CST)
2722 *pbitpos += TREE_INT_CST_LOW (pos);
2723 else
2724 {
2725 /* Assume here that the offset is a multiple of a unit.
2726 If not, there should be an explicitly added constant. */
2727 if (offset)
2728 offset = size_binop (PLUS_EXPR, offset,
2729 size_binop (FLOOR_DIV_EXPR, pos,
2730 size_int (BITS_PER_UNIT)));
2731 else
2732 offset = size_binop (FLOOR_DIV_EXPR, pos,
2733 size_int (BITS_PER_UNIT));
2734 }
2735 }
2736
2737 else if (TREE_CODE (exp) == ARRAY_REF
2738 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2739 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2740 {
2741 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2742 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2743 }
2744 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2745 && ! ((TREE_CODE (exp) == NOP_EXPR
2746 || TREE_CODE (exp) == CONVERT_EXPR)
2747 && (TYPE_MODE (TREE_TYPE (exp))
2748 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2749 break;
2750
2751 /* If any reference in the chain is volatile, the effect is volatile. */
2752 if (TREE_THIS_VOLATILE (exp))
2753 *pvolatilep = 1;
2754 exp = TREE_OPERAND (exp, 0);
2755 }
2756
2757 /* If this was a bit-field, see if there is a mode that allows direct
2758 access in case EXP is in memory. */
2759 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2760 {
2761 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2762 if (mode == BLKmode)
2763 mode = VOIDmode;
2764 }
2765
2766 *pmode = mode;
2767 *poffset = offset;
2768#if 0
2769 /* We aren't finished fixing the callers to really handle nonzero offset. */
2770 if (offset != 0)
2771 abort ();
2772#endif
2773
2774 return exp;
2775}
2776\f
2777/* Given an rtx VALUE that may contain additions and multiplications,
2778 return an equivalent value that just refers to a register or memory.
2779 This is done by generating instructions to perform the arithmetic
2780 and returning a pseudo-register containing the value. */
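
/* E.g., given VALUE = (plus:SI (reg:SI 60) (const_int 4)), this emits
 an add insn and returns a pseudo register holding the sum; a VALUE
 that is already a REG or MEM is returned unchanged. (A sketch,
 assuming an SImode pseudo numbered 60.) */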
2781
2782rtx
2783force_operand (value, target)
2784 rtx value, target;
2785{
2786 register optab binoptab = 0;
2787 /* Use a temporary to force order of execution of calls to
2788 `force_operand'. */
2789 rtx tmp;
2790 register rtx op2;
2791 /* Use subtarget as the target for operand 0 of a binary operation. */
2792 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2793
2794 if (GET_CODE (value) == PLUS)
2795 binoptab = add_optab;
2796 else if (GET_CODE (value) == MINUS)
2797 binoptab = sub_optab;
2798 else if (GET_CODE (value) == MULT)
2799 {
2800 op2 = XEXP (value, 1);
2801 if (!CONSTANT_P (op2)
2802 && !(GET_CODE (op2) == REG && op2 != subtarget))
2803 subtarget = 0;
2804 tmp = force_operand (XEXP (value, 0), subtarget);
2805 return expand_mult (GET_MODE (value), tmp,
2806 force_operand (op2, NULL_RTX),
2807 target, 0);
2808 }
2809
2810 if (binoptab)
2811 {
2812 op2 = XEXP (value, 1);
2813 if (!CONSTANT_P (op2)
2814 && !(GET_CODE (op2) == REG && op2 != subtarget))
2815 subtarget = 0;
2816 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2817 {
2818 binoptab = add_optab;
2819 op2 = negate_rtx (GET_MODE (value), op2);
2820 }
2821
2822 /* Check for an addition with OP2 a constant integer and our first
2823 operand a PLUS of a virtual register and something else. In that
2824 case, we want to emit the sum of the virtual register and the
2825 constant first and then add the other value. This allows virtual
2826 register instantiation to simply modify the constant rather than
2827 creating another one around this addition. */
2828 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2829 && GET_CODE (XEXP (value, 0)) == PLUS
2830 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2831 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2832 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2833 {
2834 rtx temp = expand_binop (GET_MODE (value), binoptab,
2835 XEXP (XEXP (value, 0), 0), op2,
2836 subtarget, 0, OPTAB_LIB_WIDEN);
2837 return expand_binop (GET_MODE (value), binoptab, temp,
2838 force_operand (XEXP (XEXP (value, 0), 1), 0),
2839 target, 0, OPTAB_LIB_WIDEN);
2840 }
2841
2842 tmp = force_operand (XEXP (value, 0), subtarget);
2843 return expand_binop (GET_MODE (value), binoptab, tmp,
2844 force_operand (op2, NULL_RTX),
2845 target, 0, OPTAB_LIB_WIDEN);
2846 /* We give UNSIGNEDP = 0 to expand_binop
2847 because the only operations we are expanding here are signed ones. */
2848 }
2849 return value;
2850}
2851\f
2852/* Subroutine of expand_expr:
2853 save the non-copied parts (LIST) of an expr (LHS), and return a list
2854 which can restore these values to their previous values,
2855 should something modify their storage. */
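
/* Each part is saved by storing it into a fresh stack temporary; the
 list returned pairs the COMPONENT_REF to be restored with an RTL_EXPR
 wrapping that temporary, as built below. */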
2856
2857static tree
2858save_noncopied_parts (lhs, list)
2859 tree lhs;
2860 tree list;
2861{
2862 tree tail;
2863 tree parts = 0;
2864
2865 for (tail = list; tail; tail = TREE_CHAIN (tail))
2866 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2867 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2868 else
2869 {
2870 tree part = TREE_VALUE (tail);
2871 tree part_type = TREE_TYPE (part);
2872 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2873 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2874 int_size_in_bytes (part_type), 0);
2875 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2876 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2877 parts = tree_cons (to_be_saved,
2878 build (RTL_EXPR, part_type, NULL_TREE,
2879 (tree) target),
2880 parts);
2881 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2882 }
2883 return parts;
2884}
2885
2886/* Subroutine of expand_expr:
2887 record the non-copied parts (LIST) of an expr (LHS), and return a list
2888 which specifies the initial values of these parts. */
2889
2890static tree
2891init_noncopied_parts (lhs, list)
2892 tree lhs;
2893 tree list;
2894{
2895 tree tail;
2896 tree parts = 0;
2897
2898 for (tail = list; tail; tail = TREE_CHAIN (tail))
2899 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2900 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2901 else
2902 {
2903 tree part = TREE_VALUE (tail);
2904 tree part_type = TREE_TYPE (part);
2905 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2906 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2907 }
2908 return parts;
2909}
2910
2911/* Subroutine of expand_expr: return nonzero iff there is no way that
2912 EXP can reference X, which is being modified. */
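
/* For instance, the CONSTRUCTOR case of expand_expr below calls
 safe_from_p (target, exp) first: if EXP might reference TARGET
 (say, two MEMs that could overlap), the value must be built in a
 fresh register or temporary instead. */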
2913
2914static int
2915safe_from_p (x, exp)
2916 rtx x;
2917 tree exp;
2918{
2919 rtx exp_rtl = 0;
2920 int i, nops;
2921
2922 if (x == 0)
2923 return 1;
2924
2925 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2926 find the underlying pseudo. */
2927 if (GET_CODE (x) == SUBREG)
2928 {
2929 x = SUBREG_REG (x);
2930 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2931 return 0;
2932 }
2933
2934 /* If X is a location in the outgoing argument area, it is always safe. */
2935 if (GET_CODE (x) == MEM
2936 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2937 || (GET_CODE (XEXP (x, 0)) == PLUS
2938 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2939 return 1;
2940
2941 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2942 {
2943 case 'd':
2944 exp_rtl = DECL_RTL (exp);
2945 break;
2946
2947 case 'c':
2948 return 1;
2949
2950 case 'x':
2951 if (TREE_CODE (exp) == TREE_LIST)
2952 return ((TREE_VALUE (exp) == 0
2953 || safe_from_p (x, TREE_VALUE (exp)))
2954 && (TREE_CHAIN (exp) == 0
2955 || safe_from_p (x, TREE_CHAIN (exp))));
2956 else
2957 return 0;
2958
2959 case '1':
2960 return safe_from_p (x, TREE_OPERAND (exp, 0));
2961
2962 case '2':
2963 case '<':
2964 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2965 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2966
2967 case 'e':
2968 case 'r':
2969 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2970 the expression. If it is set, we conflict iff we are that rtx or
2971 both are in memory. Otherwise, we check all operands of the
2972 expression recursively. */
2973
2974 switch (TREE_CODE (exp))
2975 {
2976 case ADDR_EXPR:
2977 return staticp (TREE_OPERAND (exp, 0));
2978
2979 case INDIRECT_REF:
2980 if (GET_CODE (x) == MEM)
2981 return 0;
2982 break;
2983
2984 case CALL_EXPR:
2985 exp_rtl = CALL_EXPR_RTL (exp);
2986 if (exp_rtl == 0)
2987 {
2988 /* Assume that the call will clobber all hard registers and
2989 all of memory. */
2990 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2991 || GET_CODE (x) == MEM)
2992 return 0;
2993 }
2994
2995 break;
2996
2997 case RTL_EXPR:
2998 exp_rtl = RTL_EXPR_RTL (exp);
2999 if (exp_rtl == 0)
3000 /* We don't know what this can modify. */
3001 return 0;
3002
3003 break;
3004
3005 case WITH_CLEANUP_EXPR:
3006 exp_rtl = RTL_EXPR_RTL (exp);
3007 break;
3008
3009 case SAVE_EXPR:
3010 exp_rtl = SAVE_EXPR_RTL (exp);
3011 break;
3012
3013 case BIND_EXPR:
3014 /* The only operand we look at is operand 1. The rest aren't
3015 part of the expression. */
3016 return safe_from_p (x, TREE_OPERAND (exp, 1));
3017
3018 case METHOD_CALL_EXPR:
3019 /* This takes a rtx argument, but shouldn't appear here. */
3020 abort ();
3021 }
3022
3023 /* If we have an rtx, we do not need to scan our operands. */
3024 if (exp_rtl)
3025 break;
3026
3027 nops = tree_code_length[(int) TREE_CODE (exp)];
3028 for (i = 0; i < nops; i++)
3029 if (TREE_OPERAND (exp, i) != 0
3030 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3031 return 0;
3032 }
3033
3034 /* If we have an rtl, find any enclosed object. Then see if we conflict
3035 with it. */
3036 if (exp_rtl)
3037 {
3038 if (GET_CODE (exp_rtl) == SUBREG)
3039 {
3040 exp_rtl = SUBREG_REG (exp_rtl);
3041 if (GET_CODE (exp_rtl) == REG
3042 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3043 return 0;
3044 }
3045
3046 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3047 are memory and EXP is not readonly. */
3048 return ! (rtx_equal_p (x, exp_rtl)
3049 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3050 && ! TREE_READONLY (exp)));
3051 }
3052
3053 /* If we reach here, it is safe. */
3054 return 1;
3055}
3056
3057/* Subroutine of expand_expr: return nonzero iff EXP is an
3058 expression whose type is statically determinable. */
3059
3060static int
3061fixed_type_p (exp)
3062 tree exp;
3063{
3064 if (TREE_CODE (exp) == PARM_DECL
3065 || TREE_CODE (exp) == VAR_DECL
3066 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3067 || TREE_CODE (exp) == COMPONENT_REF
3068 || TREE_CODE (exp) == ARRAY_REF)
3069 return 1;
3070 return 0;
3071}
3072\f
3073/* expand_expr: generate code for computing expression EXP.
3074 An rtx for the computed value is returned. The value is never null.
3075 In the case of a void EXP, const0_rtx is returned.
3076
3077 The value may be stored in TARGET if TARGET is nonzero.
3078 TARGET is just a suggestion; callers must assume that
3079 the rtx returned may not be the same as TARGET.
3080
3081 If TARGET is CONST0_RTX, it means that the value will be ignored.
3082
3083 If TMODE is not VOIDmode, it suggests generating the
3084 result in mode TMODE. But this is done only when convenient.
3085 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3086 TMODE is just a suggestion; callers must assume that
3087 the rtx returned may not have mode TMODE.
3088
3089 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3090 with a constant address even if that address is not normally legitimate.
3091 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3092
3093 If MODIFIER is EXPAND_SUM then when EXP is an addition
3094 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3095 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3096 products as above, or REG or MEM, or constant.
3097 Ordinarily in such cases we would output mul or add instructions
3098 and then return a pseudo reg containing the sum.
3099
3100 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3101 it also marks a label as absolutely required (it can't be dead).
3102 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3103 This is used for outputting expressions used in initializers. */
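
/* A common idiom within this file is

 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);

 which lets the result stay in the (PLUS ...) form described above,
 suitable for use as a memory address. */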
3104
3105rtx
3106expand_expr (exp, target, tmode, modifier)
3107 register tree exp;
3108 rtx target;
3109 enum machine_mode tmode;
3110 enum expand_modifier modifier;
3111{
3112 register rtx op0, op1, temp;
3113 tree type = TREE_TYPE (exp);
3114 int unsignedp = TREE_UNSIGNED (type);
3115 register enum machine_mode mode = TYPE_MODE (type);
3116 register enum tree_code code = TREE_CODE (exp);
3117 optab this_optab;
3118 /* Use subtarget as the target for operand 0 of a binary operation. */
3119 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3120 rtx original_target = target;
3121 int ignore = target == const0_rtx;
3122 tree context;
3123
3124 /* Don't use hard regs as subtargets, because the combiner
3125 can only handle pseudo regs. */
3126 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3127 subtarget = 0;
3128 /* Avoid subtargets inside loops,
3129 since they hide some invariant expressions. */
3130 if (preserve_subexpressions_p ())
3131 subtarget = 0;
3132
3133 if (ignore) target = 0, original_target = 0;
3134
3135 /* If will do cse, generate all results into pseudo registers
3136 since 1) that allows cse to find more things
3137 and 2) otherwise cse could produce an insn the machine
3138 cannot support. */
3139
3140 if (! cse_not_expected && mode != BLKmode && target
3141 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3142 target = subtarget;
3143
3144 /* Ensure we reference a volatile object even if value is ignored. */
3145 if (ignore && TREE_THIS_VOLATILE (exp)
3146 && mode != VOIDmode && mode != BLKmode)
3147 {
3148 target = gen_reg_rtx (mode);
3149 temp = expand_expr (exp, target, VOIDmode, modifier);
3150 if (temp != target)
3151 emit_move_insn (target, temp);
3152 return target;
3153 }
3154
3155 switch (code)
3156 {
3157 case LABEL_DECL:
3158 {
3159 tree function = decl_function_context (exp);
3160 /* Handle using a label in a containing function. */
3161 if (function != current_function_decl && function != 0)
3162 {
3163 struct function *p = find_function_data (function);
3164 /* Allocate in the memory associated with the function
3165 that the label is in. */
3166 push_obstacks (p->function_obstack,
3167 p->function_maybepermanent_obstack);
3168
3169 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3170 label_rtx (exp), p->forced_labels);
3171 pop_obstacks ();
3172 }
3173 else if (modifier == EXPAND_INITIALIZER)
3174 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3175 label_rtx (exp), forced_labels);
3176 temp = gen_rtx (MEM, FUNCTION_MODE,
3177 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3178 if (function != current_function_decl && function != 0)
3179 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3180 return temp;
3181 }
3182
3183 case PARM_DECL:
3184 if (DECL_RTL (exp) == 0)
3185 {
3186 error_with_decl (exp, "prior parameter's size depends on `%s'");
3187 return CONST0_RTX (mode);
3188 }
3189
3190 case FUNCTION_DECL:
3191 case VAR_DECL:
3192 case RESULT_DECL:
3193 if (DECL_RTL (exp) == 0)
3194 abort ();
3195 /* Ensure variable marked as used
3196 even if it doesn't go through a parser. */
3197 TREE_USED (exp) = 1;
3198 /* Handle variables inherited from containing functions. */
3199 context = decl_function_context (exp);
3200
3201 /* We treat inline_function_decl as an alias for the current function
3202 because that is the inline function whose vars, types, etc.
3203 are being merged into the current function.
3204 See expand_inline_function. */
3205 if (context != 0 && context != current_function_decl
3206 && context != inline_function_decl
3207 /* If var is static, we don't need a static chain to access it. */
3208 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3209 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3210 {
3211 rtx addr;
3212
3213 /* Mark as non-local and addressable. */
3214 DECL_NONLOCAL (exp) = 1;
3215 mark_addressable (exp);
3216 if (GET_CODE (DECL_RTL (exp)) != MEM)
3217 abort ();
3218 addr = XEXP (DECL_RTL (exp), 0);
3219 if (GET_CODE (addr) == MEM)
3220 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3221 else
3222 addr = fix_lexical_addr (addr, exp);
3223 return change_address (DECL_RTL (exp), mode, addr);
3224 }
3225
3226 /* This is the case of an array whose size is to be determined
3227 from its initializer, while the initializer is still being parsed.
3228 See expand_decl. */
3229 if (GET_CODE (DECL_RTL (exp)) == MEM
3230 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3231 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3232 XEXP (DECL_RTL (exp), 0));
3233 if (GET_CODE (DECL_RTL (exp)) == MEM
3234 && modifier != EXPAND_CONST_ADDRESS
3235 && modifier != EXPAND_SUM
3236 && modifier != EXPAND_INITIALIZER)
3237 {
3238 /* DECL_RTL probably contains a constant address.
3239 On RISC machines where a constant address isn't valid,
3240 make some insns to get that address into a register. */
3241 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3242 || (flag_force_addr
3243 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3244 return change_address (DECL_RTL (exp), VOIDmode,
3245 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3246 }
3247 return DECL_RTL (exp);
3248
3249 case INTEGER_CST:
3250 return immed_double_const (TREE_INT_CST_LOW (exp),
3251 TREE_INT_CST_HIGH (exp),
3252 mode);
3253
3254 case CONST_DECL:
3255 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3256
3257 case REAL_CST:
3258 /* If optimized, generate immediate CONST_DOUBLE
3259 which will be turned into memory by reload if necessary.
3260
3261 We used to force a register so that loop.c could see it. But
3262 this does not allow gen_* patterns to perform optimizations with
3263 the constants. It also produces two insns in cases like "x = 1.0;".
3264 On most machines, floating-point constants are not permitted in
3265 many insns, so we'd end up copying it to a register in any case.
3266
3267 Now, we do the copying in expand_binop, if appropriate. */
3268 return immed_real_const (exp);
3269
3270 case COMPLEX_CST:
3271 case STRING_CST:
3272 if (! TREE_CST_RTL (exp))
3273 output_constant_def (exp);
3274
3275 /* TREE_CST_RTL probably contains a constant address.
3276 On RISC machines where a constant address isn't valid,
3277 make some insns to get that address into a register. */
3278 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3279 && modifier != EXPAND_CONST_ADDRESS
3280 && modifier != EXPAND_INITIALIZER
3281 && modifier != EXPAND_SUM
3282 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3283 return change_address (TREE_CST_RTL (exp), VOIDmode,
3284 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3285 return TREE_CST_RTL (exp);
3286
3287 case SAVE_EXPR:
3288 context = decl_function_context (exp);
3289 /* We treat inline_function_decl as an alias for the current function
3290 because that is the inline function whose vars, types, etc.
3291 are being merged into the current function.
3292 See expand_inline_function. */
3293 if (context == current_function_decl || context == inline_function_decl)
3294 context = 0;
3295
3296 /* If this is non-local, handle it. */
3297 if (context)
3298 {
3299 temp = SAVE_EXPR_RTL (exp);
3300 if (temp && GET_CODE (temp) == REG)
3301 {
3302 put_var_into_stack (exp);
3303 temp = SAVE_EXPR_RTL (exp);
3304 }
3305 if (temp == 0 || GET_CODE (temp) != MEM)
3306 abort ();
3307 return change_address (temp, mode,
3308 fix_lexical_addr (XEXP (temp, 0), exp));
3309 }
3310 if (SAVE_EXPR_RTL (exp) == 0)
3311 {
3312 if (mode == BLKmode)
3313 temp
3314 = assign_stack_temp (mode,
3315 int_size_in_bytes (TREE_TYPE (exp)), 0);
3316 else
3317 temp = gen_reg_rtx (mode);
3318 SAVE_EXPR_RTL (exp) = temp;
3319 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3320 if (!optimize && GET_CODE (temp) == REG)
3321 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3322 save_expr_regs);
3323 }
3324 return SAVE_EXPR_RTL (exp);
3325
3326 case EXIT_EXPR:
3327 /* Exit the current loop if the body-expression is true. */
3328 {
3329 rtx label = gen_label_rtx ();
3330 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3331 expand_exit_loop (NULL_PTR);
3332 emit_label (label);
3333 }
3334 return const0_rtx;
3335
3336 case LOOP_EXPR:
3337 expand_start_loop (1);
3338 expand_expr_stmt (TREE_OPERAND (exp, 0));
3339 expand_end_loop ();
3340
3341 return const0_rtx;
3342
3343 case BIND_EXPR:
3344 {
3345 tree vars = TREE_OPERAND (exp, 0);
3346 int vars_need_expansion = 0;
3347
3348 /* Need to open a binding contour here because
3349 if there are any cleanups they must be contained here. */
3350 expand_start_bindings (0);
3351
3352 /* Mark the corresponding BLOCK for output. */
3353 if (TREE_OPERAND (exp, 2) != 0)
3354 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3355
3356 /* If VARS have not yet been expanded, expand them now. */
3357 while (vars)
3358 {
3359 if (DECL_RTL (vars) == 0)
3360 {
3361 vars_need_expansion = 1;
3362 expand_decl (vars);
3363 }
3364 expand_decl_init (vars);
3365 vars = TREE_CHAIN (vars);
3366 }
3367
3368 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3369
3370 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3371
3372 return temp;
3373 }
3374
3375 case RTL_EXPR:
3376 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3377 abort ();
3378 emit_insns (RTL_EXPR_SEQUENCE (exp));
3379 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3380 return RTL_EXPR_RTL (exp);
3381
3382 case CONSTRUCTOR:
3383 /* All elts simple constants => refer to a constant in memory. But
3384 if this is a non-BLKmode mode, let it store a field at a time
3385 since that should make a CONST_INT or CONST_DOUBLE when we
3386 fold. */
3387 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3388 {
3389 rtx constructor = output_constant_def (exp);
3390 if (modifier != EXPAND_CONST_ADDRESS
3391 && modifier != EXPAND_INITIALIZER
3392 && modifier != EXPAND_SUM
3393 && !memory_address_p (GET_MODE (constructor),
3394 XEXP (constructor, 0)))
3395 constructor = change_address (constructor, VOIDmode,
3396 XEXP (constructor, 0));
3397 return constructor;
3398 }
3399
3400 if (ignore)
3401 {
3402 tree elt;
3403 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3404 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3405 return const0_rtx;
3406 }
3407 else
3408 {
3409 if (target == 0 || ! safe_from_p (target, exp))
3410 {
3411 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3412 target = gen_reg_rtx (mode);
3413 else
3414 {
3415 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3416 if (target)
3417 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3418 target = safe_target;
3419 }
3420 }
3421 store_constructor (exp, target);
3422 return target;
3423 }
3424
3425 case INDIRECT_REF:
3426 {
3427 tree exp1 = TREE_OPERAND (exp, 0);
3428 tree exp2;
3429
3430 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3431 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3432 This code has the same general effect as simply doing
3433 expand_expr on the save expr, except that the expression PTR
3434 is computed for use as a memory address. This means different
3435 code, suitable for indexing, may be generated. */
3436 if (TREE_CODE (exp1) == SAVE_EXPR
3437 && SAVE_EXPR_RTL (exp1) == 0
3438 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3439 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3440 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3441 {
3442 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3443 VOIDmode, EXPAND_SUM);
3444 op0 = memory_address (mode, temp);
3445 op0 = copy_all_regs (op0);
3446 SAVE_EXPR_RTL (exp1) = op0;
3447 }
3448 else
3449 {
3450 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3451 op0 = memory_address (mode, op0);
3452 }
3453
3454 temp = gen_rtx (MEM, mode, op0);
3455 /* If address was computed by addition,
3456 mark this as an element of an aggregate. */
3457 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3458 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3459 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3460 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3461 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3462 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3463 || (TREE_CODE (exp1) == ADDR_EXPR
3464 && (exp2 = TREE_OPERAND (exp1, 0))
3465 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3466 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3467 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3468 MEM_IN_STRUCT_P (temp) = 1;
3469 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3470#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3471 a location is accessed through a pointer to const does not mean
3472 that the value there can never change. */
3473 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3474#endif
3475 return temp;
3476 }
3477
3478 case ARRAY_REF:
3479 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3480 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3481 {
3482 /* Nonconstant array index or nonconstant element size.
3483 Generate the tree for *(&array+index) and expand that,
3484 except do it in a language-independent way
3485 and don't complain about non-lvalue arrays.
3486 `mark_addressable' should already have been called
3487 for any array for which this case will be reached. */
3488
3489 /* Don't forget the const or volatile flag from the array element. */
3490 tree variant_type = build_type_variant (type,
3491 TREE_READONLY (exp),
3492 TREE_THIS_VOLATILE (exp));
3493 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3494 TREE_OPERAND (exp, 0));
3495 tree index = TREE_OPERAND (exp, 1);
3496 tree elt;
3497
3498 /* Convert the integer argument to a type the same size as a pointer
3499 so the multiply won't overflow spuriously. */
3500 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3501 index = convert (type_for_size (POINTER_SIZE, 0), index);
3502
3503 /* Don't think the address has side effects
3504 just because the array does.
3505 (In some cases the address might have side effects,
3506 and we fail to record that fact here. However, it should not
3507 matter, since expand_expr should not care.) */
3508 TREE_SIDE_EFFECTS (array_adr) = 0;
3509
3510 elt = build1 (INDIRECT_REF, type,
3511 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3512 array_adr,
3513 fold (build (MULT_EXPR,
3514 TYPE_POINTER_TO (variant_type),
3515 index, size_in_bytes (type))))));
3516
3517 /* Volatility, etc., of new expression is same as old expression. */
3518 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3519 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3520 TREE_READONLY (elt) = TREE_READONLY (exp);
3521
3522 return expand_expr (elt, target, tmode, modifier);
3523 }
3524
3525 /* Fold an expression like: "foo"[2].
3526 This is not done in fold so it won't happen inside &. */
3527 {
3528 int i;
3529 tree arg0 = TREE_OPERAND (exp, 0);
3530 tree arg1 = TREE_OPERAND (exp, 1);
3531
3532 if (TREE_CODE (arg0) == STRING_CST
3533 && TREE_CODE (arg1) == INTEGER_CST
3534 && !TREE_INT_CST_HIGH (arg1)
3535 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3536 {
3537 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3538 {
3539 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3540 TREE_TYPE (exp) = integer_type_node;
3541 return expand_expr (exp, target, tmode, modifier);
3542 }
3543 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3544 {
3545 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3546 TREE_TYPE (exp) = integer_type_node;
3547 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3548 }
3549 }
3550 }
3551
3552 /* If this is a constant index into a constant array,
3553 just get the value from the array. Handle both the cases when
3554 we have an explicit constructor and when our operand is a variable
3555 that was declared const. */
3556
3557 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3558 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3559 {
3560 tree index = fold (TREE_OPERAND (exp, 1));
3561 if (TREE_CODE (index) == INTEGER_CST
3562 && TREE_INT_CST_HIGH (index) == 0)
3563 {
3564 int i = TREE_INT_CST_LOW (index);
3565 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3566
3567 while (elem && i--)
3568 elem = TREE_CHAIN (elem);
3569 if (elem)
3570 return expand_expr (fold (TREE_VALUE (elem)), target,
3571 tmode, modifier);
3572 }
3573 }
3574
3575 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3576 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3577 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3578 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3579 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3580 && optimize >= 1
3581 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3582 != ERROR_MARK))
3583 {
3584 tree index = fold (TREE_OPERAND (exp, 1));
3585 if (TREE_CODE (index) == INTEGER_CST
3586 && TREE_INT_CST_HIGH (index) == 0)
3587 {
3588 int i = TREE_INT_CST_LOW (index);
3589 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3590
3591 if (TREE_CODE (init) == CONSTRUCTOR)
3592 {
3593 tree elem = CONSTRUCTOR_ELTS (init);
3594
3595 while (elem && i--)
3596 elem = TREE_CHAIN (elem);
3597 if (elem)
3598 return expand_expr (fold (TREE_VALUE (elem)), target,
3599 tmode, modifier);
3600 }
3601 else if (TREE_CODE (init) == STRING_CST
3602 && i < TREE_STRING_LENGTH (init))
3603 {
3604 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3605 return convert_to_mode (mode, temp, 0);
3606 }
3607 }
3608 }
3609 /* Treat array-ref with constant index as a component-ref. */
3610
3611 case COMPONENT_REF:
3612 case BIT_FIELD_REF:
3613 /* If the operand is a CONSTRUCTOR, we can just extract the
3614 appropriate field if it is present. */
3615 if (code != ARRAY_REF
3616 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3617 {
3618 tree elt;
3619
3620 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3621 elt = TREE_CHAIN (elt))
3622 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3623 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3624 }
3625
3626 {
3627 enum machine_mode mode1;
3628 int bitsize;
3629 int bitpos;
3630 tree offset;
3631 int volatilep = 0;
3632 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3633 &mode1, &unsignedp, &volatilep);
3634
3635 /* In some cases, we will be offsetting OP0's address by a constant.
3636 So get it as a sum, if possible. If we will be using it
3637 directly in an insn, we validate it. */
3638 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3639
3640 /* If this is a constant, put it into a register if it is a
3641 legitimate constant and memory if it isn't. */
3642 if (CONSTANT_P (op0))
3643 {
3644 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3645 if (LEGITIMATE_CONSTANT_P (op0))
3646 op0 = force_reg (mode, op0);
3647 else
3648 op0 = validize_mem (force_const_mem (mode, op0));
3649 }
3650
3651 if (offset != 0)
3652 {
3653 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3654
3655 if (GET_CODE (op0) != MEM)
3656 abort ();
3657 op0 = change_address (op0, VOIDmode,
3658 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3659 force_reg (Pmode, offset_rtx)));
3660 }
3661
3662 /* Don't forget about volatility even if this is a bitfield. */
3663 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3664 {
3665 op0 = copy_rtx (op0);
3666 MEM_VOLATILE_P (op0) = 1;
3667 }
3668
3669 if (mode1 == VOIDmode
3670 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3671 && modifier != EXPAND_CONST_ADDRESS
3672 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3673 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3674 {
3675 /* In cases where an aligned union has an unaligned object
3676 as a field, we might be extracting a BLKmode value from
3677 an integer-mode (e.g., SImode) object. Handle this case
3678 by doing the extract into an object as wide as the field
3679 (which we know to be the width of a basic mode), then
3680 storing into memory, and changing the mode to BLKmode. */
3681 enum machine_mode ext_mode = mode;
3682
3683 if (ext_mode == BLKmode)
3684 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3685
3686 if (ext_mode == BLKmode)
3687 abort ();
3688
3689 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3690 unsignedp, target, ext_mode, ext_mode,
3691 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3692 int_size_in_bytes (TREE_TYPE (tem)));
3693 if (mode == BLKmode)
3694 {
3695 rtx new = assign_stack_temp (ext_mode,
3696 bitsize / BITS_PER_UNIT, 0);
3697
3698 emit_move_insn (new, op0);
3699 op0 = copy_rtx (new);
3700 PUT_MODE (op0, BLKmode);
3701 }
3702
3703 return op0;
3704 }
3705
3706 /* Get a reference to just this component. */
3707 if (modifier == EXPAND_CONST_ADDRESS
3708 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3709 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3710 (bitpos / BITS_PER_UNIT)));
3711 else
3712 op0 = change_address (op0, mode1,
3713 plus_constant (XEXP (op0, 0),
3714 (bitpos / BITS_PER_UNIT)));
3715 MEM_IN_STRUCT_P (op0) = 1;
3716 MEM_VOLATILE_P (op0) |= volatilep;
3717 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3718 return op0;
3719 if (target == 0)
3720 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3721 convert_move (target, op0, unsignedp);
3722 return target;
3723 }
3724
3725 case OFFSET_REF:
3726 {
3727 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3728 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3729 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3730 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3731 MEM_IN_STRUCT_P (temp) = 1;
3732 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3733#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3734 a location is accessed through a pointer to const does not mean
3735 that the value there can never change. */
3736 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3737#endif
3738 return temp;
3739 }
3740
3741 /* Intended for a reference to a buffer of a file-object in Pascal.
3742 But it's not certain that a special tree code will really be
3743 necessary for these. INDIRECT_REF might work for them. */
3744 case BUFFER_REF:
3745 abort ();
3746
3747 case WITH_CLEANUP_EXPR:
3748 if (RTL_EXPR_RTL (exp) == 0)
3749 {
3750 RTL_EXPR_RTL (exp)
3751 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3752 cleanups_this_call
3753 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3754 /* That's it for this cleanup. */
3755 TREE_OPERAND (exp, 2) = 0;
3756 }
3757 return RTL_EXPR_RTL (exp);
3758
3759 case CALL_EXPR:
3760 /* Check for a built-in function. */
3761 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3762 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3763 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3764 return expand_builtin (exp, target, subtarget, tmode, ignore);
3765 /* If this call was expanded already by preexpand_calls,
3766 just return the result we got. */
3767 if (CALL_EXPR_RTL (exp) != 0)
3768 return CALL_EXPR_RTL (exp);
3769 return expand_call (exp, target, ignore);
3770
3771 case NON_LVALUE_EXPR:
3772 case NOP_EXPR:
3773 case CONVERT_EXPR:
3774 case REFERENCE_EXPR:
3775 if (TREE_CODE (type) == VOID_TYPE || ignore)
3776 {
3777 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3778 return const0_rtx;
3779 }
3780 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3781 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3782 if (TREE_CODE (type) == UNION_TYPE)
3783 {
3784 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3785 if (target == 0)
3786 {
3787 if (mode == BLKmode)
3788 {
3789 if (TYPE_SIZE (type) == 0
3790 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3791 abort ();
3792 target = assign_stack_temp (BLKmode,
3793 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3794 + BITS_PER_UNIT - 1)
3795 / BITS_PER_UNIT, 0);
3796 }
3797 else
3798 target = gen_reg_rtx (mode);
3799 }
3800 if (GET_CODE (target) == MEM)
3801 /* Store data into beginning of memory target. */
3802 store_expr (TREE_OPERAND (exp, 0),
3803 change_address (target, TYPE_MODE (valtype), 0),
3804 NULL_RTX);
3805 else if (GET_CODE (target) == REG)
3806 /* Store this field into a union of the proper type. */
3807 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3808 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3809 VOIDmode, 0, 1,
3810 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3811 else
3812 abort ();
3813
3814 /* Return the entire union. */
3815 return target;
3816 }
3817 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3818 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3819 return op0;
3820 if (modifier == EXPAND_INITIALIZER)
3821 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3822 if (flag_force_mem && GET_CODE (op0) == MEM)
3823 op0 = copy_to_reg (op0);
3824
3825 if (target == 0)
3826 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3827 else
3828 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3829 return target;
3830
3831 case PLUS_EXPR:
3832 /* We come here from MINUS_EXPR when the second operand is a constant. */
3833 plus_expr:
3834 this_optab = add_optab;
3835
3836 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3837 something else, make sure we add the register to the constant and
3838 then to the other thing. This case can occur during strength
3839 reduction and doing it this way will produce better code if the
3840 frame pointer or argument pointer is eliminated.
3841
3842 fold-const.c will ensure that the constant is always in the inner
3843 PLUS_EXPR, so the only case we need to do anything about is if
3844 sp, ap, or fp is our second argument, in which case we must swap
3845 the innermost first argument and our second argument. */
3846
3847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3848 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3849 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3850 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3851 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3852 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3853 {
3854 tree t = TREE_OPERAND (exp, 1);
3855
3856 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3857 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3858 }
3859
3860 /* If the result is to be Pmode and we are adding an integer to
3861 something, we might be forming a constant. So try to use
3862 plus_constant. If it produces a sum and we can't accept it,
3863 use force_operand. This allows P = &ARR[const] to generate
3864 efficient code on machines where a SYMBOL_REF is not a valid
3865 address.
3866
3867 If this is an EXPAND_SUM call, always return the sum. */
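 /* Illustrative sketch (an addition, not original text): for
 "static int arr[10]; p = &arr[3];" the variable part expands to
 (symbol_ref "arr") and plus_constant folds in the 12-byte offset,
 giving (const (plus (symbol_ref "arr") (const_int 12)));
 force_operand is needed only if that form is not a valid
 address on the target. */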
3868 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3869 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3870 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3871 || mode == Pmode))
3872 {
3873 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3874 EXPAND_SUM);
3875 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3876 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3877 op1 = force_operand (op1, target);
3878 return op1;
3879 }
3880
3881 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3882 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3883 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3884 || mode == Pmode))
3885 {
3886 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3887 EXPAND_SUM);
3888 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3889 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3890 op0 = force_operand (op0, target);
3891 return op0;
3892 }
3893
3894 /* No sense saving up arithmetic to be done
3895 if it's all in the wrong mode to form part of an address.
3896 And force_operand won't know whether to sign-extend or
3897 zero-extend. */
3898 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3899 || mode != Pmode) goto binop;
3900
3901 preexpand_calls (exp);
3902 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3903 subtarget = 0;
3904
3905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3906 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3907
3908 /* Make sure any term that's a sum with a constant comes last. */
3909 if (GET_CODE (op0) == PLUS
3910 && CONSTANT_P (XEXP (op0, 1)))
3911 {
3912 temp = op0;
3913 op0 = op1;
3914 op1 = temp;
3915 }
3916 /* If adding to a sum including a constant,
3917 associate it to put the constant outside. */
3918 if (GET_CODE (op1) == PLUS
3919 && CONSTANT_P (XEXP (op1, 1)))
3920 {
3921 rtx constant_term = const0_rtx;
3922
3923 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3924 if (temp != 0)
3925 op0 = temp;
3926 /* Ensure that MULT comes first if there is one. */
3927 else if (GET_CODE (op0) == MULT)
3928 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3929 else
3930 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3931
3932 /* Let's also eliminate constants from op0 if possible. */
3933 op0 = eliminate_constant_term (op0, &constant_term);
3934
3935 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3936 their sum should be a constant. Form it into OP1, since the
3937 result we want will then be OP0 + OP1. */
3938
3939 temp = simplify_binary_operation (PLUS, mode, constant_term,
3940 XEXP (op1, 1));
3941 if (temp != 0)
3942 op1 = temp;
3943 else
3944 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3945 }
3946
3947 /* Put a constant term last and put a multiplication first. */
3948 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3949 temp = op1, op1 = op0, op0 = temp;
3950
3951 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3952 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3953
3954 case MINUS_EXPR:
3955 /* Handle difference of two symbolic constants,
3956 for the sake of an initializer. */
3957 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3958 && really_constant_p (TREE_OPERAND (exp, 0))
3959 && really_constant_p (TREE_OPERAND (exp, 1)))
3960 {
3961 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3962 VOIDmode, modifier);
3963 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3964 VOIDmode, modifier);
3965 return gen_rtx (MINUS, mode, op0, op1);
3966 }
3967 /* Convert A - const to A + (-const). */
3968 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3969 {
3970 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3971 fold (build1 (NEGATE_EXPR, type,
3972 TREE_OPERAND (exp, 1))));
3973 goto plus_expr;
3974 }
3975 this_optab = sub_optab;
3976 goto binop;
3977
3978 case MULT_EXPR:
3979 preexpand_calls (exp);
3980 /* If first operand is constant, swap them.
3981 Thus the following special case checks need only
3982 check the second operand. */
3983 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3984 {
3985 register tree t1 = TREE_OPERAND (exp, 0);
3986 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3987 TREE_OPERAND (exp, 1) = t1;
3988 }
3989
3990 /* Attempt to return something suitable for generating an
3991 indexed address, for machines that support that. */
3992
3993 if (modifier == EXPAND_SUM && mode == Pmode
3994 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3995 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3996 {
3997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3998
3999 /* Apply distributive law if OP0 is x+c. */
4000 if (GET_CODE (op0) == PLUS
4001 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4002 return gen_rtx (PLUS, mode,
4003 gen_rtx (MULT, mode, XEXP (op0, 0),
4004 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4005 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4006 * INTVAL (XEXP (op0, 1))));
4007
4008 if (GET_CODE (op0) != REG)
4009 op0 = force_operand (op0, NULL_RTX);
4010 if (GET_CODE (op0) != REG)
4011 op0 = copy_to_mode_reg (mode, op0);
4012
4013 return gen_rtx (MULT, mode, op0,
4014 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4015 }
4016
4017 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4018 subtarget = 0;
4019
4020 /* Check for multiplying things that have been extended
4021 from a narrower type. If this machine supports multiplying
4022 in that narrower type with a result in the desired type,
4023 do it that way, and avoid the explicit type-conversion. */
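 /* Illustrative example (an addition, not original text): for
 "short a, b; int c = (int) a * (int) b;" this lets us emit one
 widening multiply on the HImode operands (smul_widen_optab,
 e.g. a mulhisi3 pattern) instead of sign-extending both to
 SImode and doing a full SImode multiply. */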
4024 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4025 && TREE_CODE (type) == INTEGER_TYPE
4026 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4027 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4028 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4029 && int_fits_type_p (TREE_OPERAND (exp, 1),
4030 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4031 /* Don't use a widening multiply if a shift will do. */
4032 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4033 > HOST_BITS_PER_WIDE_INT)
4034 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4035 ||
4036 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4037 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4038 ==
4039 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4040 /* If both operands are extended, they must either both
4041 be zero-extended or both be sign-extended. */
4042 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4043 ==
4044 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4045 {
4046 enum machine_mode innermode
4047 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4048 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4049 ? umul_widen_optab : smul_widen_optab);
4050 if (mode == GET_MODE_WIDER_MODE (innermode)
4051 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4052 {
4053 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4054 NULL_RTX, VOIDmode, 0);
4055 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4056 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4057 VOIDmode, 0);
4058 else
4059 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4060 NULL_RTX, VOIDmode, 0);
4061 goto binop2;
4062 }
4063 }
4064 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4065 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4066 return expand_mult (mode, op0, op1, target, unsignedp);
4067
4068 case TRUNC_DIV_EXPR:
4069 case FLOOR_DIV_EXPR:
4070 case CEIL_DIV_EXPR:
4071 case ROUND_DIV_EXPR:
4072 case EXACT_DIV_EXPR:
4073 preexpand_calls (exp);
4074 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4075 subtarget = 0;
4076 /* Possible optimization: compute the dividend with EXPAND_SUM;
4077 then, if the divisor is constant, we can optimize the case
4078 where some terms of the dividend have coefficients divisible by it. */
4079 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4080 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4081 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4082
4083 case RDIV_EXPR:
4084 this_optab = flodiv_optab;
4085 goto binop;
4086
4087 case TRUNC_MOD_EXPR:
4088 case FLOOR_MOD_EXPR:
4089 case CEIL_MOD_EXPR:
4090 case ROUND_MOD_EXPR:
4091 preexpand_calls (exp);
4092 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4093 subtarget = 0;
4094 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4095 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4096 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4097
4098 case FIX_ROUND_EXPR:
4099 case FIX_FLOOR_EXPR:
4100 case FIX_CEIL_EXPR:
4101 abort (); /* Not used for C. */
4102
4103 case FIX_TRUNC_EXPR:
4104 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4105 if (target == 0)
4106 target = gen_reg_rtx (mode);
4107 expand_fix (target, op0, unsignedp);
4108 return target;
4109
4110 case FLOAT_EXPR:
4111 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4112 if (target == 0)
4113 target = gen_reg_rtx (mode);
4114 /* expand_float can't figure out what to do if FROM has VOIDmode.
4115 So give it the correct mode. With -O, cse will optimize this. */
4116 if (GET_MODE (op0) == VOIDmode)
4117 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4118 op0);
4119 expand_float (target, op0,
4120 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4121 return target;
4122
4123 case NEGATE_EXPR:
4124 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4125 temp = expand_unop (mode, neg_optab, op0, target, 0);
4126 if (temp == 0)
4127 abort ();
4128 return temp;
4129
4130 case ABS_EXPR:
4131 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4132
4133 /* Unsigned abs is simply the operand. Testing here means we don't
4134 risk generating incorrect code below. */
4135 if (TREE_UNSIGNED (type))
4136 return op0;
4137
4138 /* First try to do it with a special abs instruction. */
4139 temp = expand_unop (mode, abs_optab, op0, target, 0);
4140 if (temp != 0)
4141 return temp;
4142
4143 /* If this machine has expensive jumps, we can do integer absolute
4144 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4145 where W is the width of MODE. */
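#if 0 /* Minimal sketch of that identity in plain C (an addition, not
 original text); assumes an arithmetic right shift and W == 32. */
static int
abs_no_branch (x)
 int x;
{
 int t = x >> 31; /* 0 if x >= 0, -1 if x < 0 */
 return (x ^ t) - t; /* x = -5: t = -1, (x ^ t) = 4, 4 - (-1) = 5 */
}
#endif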
4146
4147 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4148 {
4149 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4150 size_int (GET_MODE_BITSIZE (mode) - 1),
4151 NULL_RTX, 0);
4152
4153 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4154 OPTAB_LIB_WIDEN);
4155 if (temp != 0)
4156 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4157 OPTAB_LIB_WIDEN);
4158
4159 if (temp != 0)
4160 return temp;
4161 }
4162
4163 /* If that does not win, use conditional jump and negate. */
4164 target = original_target;
4165 temp = gen_label_rtx ();
4166 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4167 || (GET_CODE (target) == REG
4168 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4169 target = gen_reg_rtx (mode);
4170 emit_move_insn (target, op0);
4171 emit_cmp_insn (target,
4172 expand_expr (convert (type, integer_zero_node),
4173 NULL_RTX, VOIDmode, 0),
4174 GE, NULL_RTX, mode, 0, 0);
4175 NO_DEFER_POP;
4176 emit_jump_insn (gen_bge (temp));
4177 op0 = expand_unop (mode, neg_optab, target, target, 0);
4178 if (op0 != target)
4179 emit_move_insn (target, op0);
4180 emit_label (temp);
4181 OK_DEFER_POP;
4182 return target;
4183
4184 case MAX_EXPR:
4185 case MIN_EXPR:
4186 target = original_target;
4187 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4188 || (GET_CODE (target) == REG
4189 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4190 target = gen_reg_rtx (mode);
4191 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4192 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4193
4194 /* First try to do it with a special MIN or MAX instruction.
4195 If that does not win, use a conditional jump to select the proper
4196 value. */
4197 this_optab = (TREE_UNSIGNED (type)
4198 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4199 : (code == MIN_EXPR ? smin_optab : smax_optab));
4200
4201 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4202 OPTAB_WIDEN);
4203 if (temp != 0)
4204 return temp;
4205
4206 if (target != op0)
4207 emit_move_insn (target, op0);
4208 op0 = gen_label_rtx ();
4209 if (code == MAX_EXPR)
4210 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4211 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4212 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4213 else
4214 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4215 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4216 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4217 if (temp == const0_rtx)
4218 emit_move_insn (target, op1);
4219 else if (temp != const_true_rtx)
4220 {
4221 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4222 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4223 else
4224 abort ();
4225 emit_move_insn (target, op1);
4226 }
4227 emit_label (op0);
4228 return target;
4229
4230/* ??? Can optimize when the operand of this is a bitwise operation,
4231 by using a different bitwise operation. */
4232 case BIT_NOT_EXPR:
4233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4234 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4235 if (temp == 0)
4236 abort ();
4237 return temp;
4238
4239 case FFS_EXPR:
4240 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4241 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4242 if (temp == 0)
4243 abort ();
4244 return temp;
4245
4246/* ??? Can optimize bitwise operations with one arg constant.
4247 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4248 and (a bitwise1 b) bitwise2 b (etc)
4249 but that is probably not worthwhile. */
4250
4251/* BIT_AND_EXPR is for bitwise anding.
4252 TRUTH_AND_EXPR is for anding two boolean values
4253 when we want in all cases to compute both of them.
4254 In general it is fastest to do TRUTH_AND_EXPR by
4255 computing both operands as actual zero-or-1 values
4256 and then bitwise anding. In cases where there cannot
4257 be any side effects, better code would be made by
4258 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4259 but the question is how to recognize those cases. */
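 /* Illustrative example (an addition, not original text): given two
 0-or-1 operands such as (a > 0) and (b > 0), TRUTH_AND_EXPR can
 be done as two store-flag insns and one bitwise AND, with no
 branches; TRUTH_ANDIF_EXPR would instead branch around the
 evaluation of the second operand. */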
4260
4261 case TRUTH_AND_EXPR:
4262 case BIT_AND_EXPR:
4263 this_optab = and_optab;
4264 goto binop;
4265
4266/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4267 case TRUTH_OR_EXPR:
4268 case BIT_IOR_EXPR:
4269 this_optab = ior_optab;
4270 goto binop;
4271
4272 case BIT_XOR_EXPR:
4273 this_optab = xor_optab;
4274 goto binop;
4275
4276 case LSHIFT_EXPR:
4277 case RSHIFT_EXPR:
4278 case LROTATE_EXPR:
4279 case RROTATE_EXPR:
4280 preexpand_calls (exp);
4281 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4282 subtarget = 0;
4283 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4284 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4285 unsignedp);
4286
4287/* Could determine the answer when only additive constants differ.
4288 Also, the addition of one can be handled by changing the condition. */
4289 case LT_EXPR:
4290 case LE_EXPR:
4291 case GT_EXPR:
4292 case GE_EXPR:
4293 case EQ_EXPR:
4294 case NE_EXPR:
4295 preexpand_calls (exp);
4296 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4297 if (temp != 0)
4298 return temp;
4299 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
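 /* Sketch of that sequence (an addition, not original text): load
 FOO into TEMP, compare TEMP against 0 and branch over a "move 1
 into TEMP" when equal; TEMP then holds 0 exactly when FOO was 0,
 and 1 otherwise. */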
4300 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4301 && original_target
4302 && GET_CODE (original_target) == REG
4303 && (GET_MODE (original_target)
4304 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4305 {
4306 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4307 if (temp != original_target)
4308 temp = copy_to_reg (temp);
4309 op1 = gen_label_rtx ();
4310 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4311 GET_MODE (temp), unsignedp, 0);
4312 emit_jump_insn (gen_beq (op1));
4313 emit_move_insn (temp, const1_rtx);
4314 emit_label (op1);
4315 return temp;
4316 }
4317 /* If no set-flag instruction, must generate a conditional
4318 store into a temporary variable. Drop through
4319 and handle this like && and ||. */
4320
4321 case TRUTH_ANDIF_EXPR:
4322 case TRUTH_ORIF_EXPR:
4323 if (target == 0 || ! safe_from_p (target, exp)
4324 /* Make sure we don't have a hard reg (such as function's return
4325 value) live across basic blocks, if not optimizing. */
4326 || (!optimize && GET_CODE (target) == REG
4327 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4328 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4329 emit_clr_insn (target);
4330 op1 = gen_label_rtx ();
4331 jumpifnot (exp, op1);
4332 emit_0_to_1_insn (target);
4333 emit_label (op1);
4334 return target;
4335
4336 case TRUTH_NOT_EXPR:
4337 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4338 /* The parser is careful to generate TRUTH_NOT_EXPR
4339 only with operands that are always zero or one. */
4340 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4341 target, 1, OPTAB_LIB_WIDEN);
4342 if (temp == 0)
4343 abort ();
4344 return temp;
4345
4346 case COMPOUND_EXPR:
4347 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4348 emit_queue ();
4349 return expand_expr (TREE_OPERAND (exp, 1),
4350 (ignore ? const0_rtx : target),
4351 VOIDmode, 0);
4352
4353 case COND_EXPR:
4354 {
4355 /* Note that COND_EXPRs whose type is a structure or union
4356 are required to be constructed to contain assignments of
4357 a temporary variable, so that we can evaluate them here
4358 for side effect only. If type is void, we must do likewise. */
4359
4360 /* If an arm of the branch requires a cleanup,
4361 only that cleanup is performed. */
4362
4363 tree singleton = 0;
4364 tree binary_op = 0, unary_op = 0;
4365 tree old_cleanups = cleanups_this_call;
4366 cleanups_this_call = 0;
4367
4368 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4369 convert it to our mode, if necessary. */
4370 if (integer_onep (TREE_OPERAND (exp, 1))
4371 && integer_zerop (TREE_OPERAND (exp, 2))
4372 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4373 {
4374 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4375 if (GET_MODE (op0) == mode)
4376 return op0;
4377 if (target == 0)
4378 target = gen_reg_rtx (mode);
4379 convert_move (target, op0, unsignedp);
4380 return target;
4381 }
4382
4383 /* If we are not to produce a result, we have no target. Otherwise,
4384 if a target was specified use it; it will not be used as an
4385 intermediate target unless it is safe. If no target, use a
4386 temporary. */
4387
4388 if (mode == VOIDmode || ignore)
4389 temp = 0;
4390 else if (original_target
4391 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4392 temp = original_target;
4393 else if (mode == BLKmode)
4394 {
4395 if (TYPE_SIZE (type) == 0
4396 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4397 abort ();
4398 temp = assign_stack_temp (BLKmode,
4399 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4400 + BITS_PER_UNIT - 1)
4401 / BITS_PER_UNIT, 0);
4402 }
4403 else
4404 temp = gen_reg_rtx (mode);
4405
4406 /* Check for X ? A + B : A. If we have this, we can copy
4407 A to the output and conditionally add B. Similarly for unary
4408 operations. Don't do this if X has side-effects because
4409 those side effects might affect A or B and the "?" operation is
4410 a sequence point in ANSI. (We test for side effects later.) */
4411
4412 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4413 && operand_equal_p (TREE_OPERAND (exp, 2),
4414 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4415 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4416 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4417 && operand_equal_p (TREE_OPERAND (exp, 1),
4418 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4419 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4420 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4421 && operand_equal_p (TREE_OPERAND (exp, 2),
4422 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4423 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4424 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4425 && operand_equal_p (TREE_OPERAND (exp, 1),
4426 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4427 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4428
4429 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4430 operation, do this as A + (X != 0). Similarly for other simple
4431 binary operators. */
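 /* Illustrative example (an addition, not original text): for
 "r = c ? i + 1 : i;" we emit a store-flag insn computing
 (c != 0) and add that 0-or-1 value to I, so no branch is needed;
 the mirrored "r = c ? i : i + 1;" is handled by first inverting
 the condition. */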
4432 if (singleton && binary_op
4433 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4434 && (TREE_CODE (binary_op) == PLUS_EXPR
4435 || TREE_CODE (binary_op) == MINUS_EXPR
4436 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4437 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4438 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4439 && integer_onep (TREE_OPERAND (binary_op, 1))
4440 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4441 {
4442 rtx result;
4443 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4444 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4445 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4446 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4447 : and_optab);
4448
4449 /* If we had X ? A : A + 1, do this as A + (X == 0).
4450
4451 We have to invert the truth value here and then put it
4452 back later if do_store_flag fails. We cannot simply copy
4453 TREE_OPERAND (exp, 0) to another variable and modify that
4454 because invert_truthvalue can modify the tree pointed to
4455 by its argument. */
4456 if (singleton == TREE_OPERAND (exp, 1))
4457 TREE_OPERAND (exp, 0)
4458 = invert_truthvalue (TREE_OPERAND (exp, 0));
4459
4460 result = do_store_flag (TREE_OPERAND (exp, 0),
4461 (safe_from_p (temp, singleton)
4462 ? temp : NULL_RTX),
4463 mode, BRANCH_COST <= 1);
4464
4465 if (result)
4466 {
4467 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4468 return expand_binop (mode, boptab, op1, result, temp,
4469 unsignedp, OPTAB_LIB_WIDEN);
4470 }
4471 else if (singleton == TREE_OPERAND (exp, 1))
4472 TREE_OPERAND (exp, 0)
4473 = invert_truthvalue (TREE_OPERAND (exp, 0));
4474 }
4475
4476 NO_DEFER_POP;
4477 op0 = gen_label_rtx ();
4478
4479 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4480 {
4481 if (temp != 0)
4482 {
4483 /* If the target conflicts with the other operand of the
4484 binary op, we can't use it. Also, we can't use the target
4485 if it is a hard register, because evaluating the condition
4486 might clobber it. */
4487 if ((binary_op
4488 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4489 || (GET_CODE (temp) == REG
4490 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4491 temp = gen_reg_rtx (mode);
4492 store_expr (singleton, temp, 0);
4493 }
4494 else
4495 expand_expr (singleton,
4496 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4497 if (cleanups_this_call)
4498 {
4499 sorry ("aggregate value in COND_EXPR");
4500 cleanups_this_call = 0;
4501 }
4502 if (singleton == TREE_OPERAND (exp, 1))
4503 jumpif (TREE_OPERAND (exp, 0), op0);
4504 else
4505 jumpifnot (TREE_OPERAND (exp, 0), op0);
4506
4507 if (binary_op && temp == 0)
4508 /* Just touch the other operand. */
4509 expand_expr (TREE_OPERAND (binary_op, 1),
4510 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4511 else if (binary_op)
4512 store_expr (build (TREE_CODE (binary_op), type,
4513 make_tree (type, temp),
4514 TREE_OPERAND (binary_op, 1)),
4515 temp, 0);
4516 else
4517 store_expr (build1 (TREE_CODE (unary_op), type,
4518 make_tree (type, temp)),
4519 temp, 0);
4520 op1 = op0;
4521 }
4522#if 0
4523 /* This is now done in jump.c and is better done there because it
4524 produces shorter register lifetimes. */
4525
4526 /* Check for both possibilities either constants or variables
4527 in registers (but not the same as the target!). If so, can
4528 save branches by assigning one, branching, and assigning the
4529 other. */
4530 else if (temp && GET_MODE (temp) != BLKmode
4531 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4532 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4533 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4534 && DECL_RTL (TREE_OPERAND (exp, 1))
4535 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4536 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4537 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4538 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4539 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4540 && DECL_RTL (TREE_OPERAND (exp, 2))
4541 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4542 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4543 {
4544 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4545 temp = gen_reg_rtx (mode);
4546 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4547 jumpifnot (TREE_OPERAND (exp, 0), op0);
4548 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4549 op1 = op0;
4550 }
4551#endif
4552 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4553 comparison operator. If we have one of these cases, set the
4554 output to A, branch on A (cse will merge these two references),
4555 then set the output to FOO. */
4556 else if (temp
4557 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4558 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4559 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4560 TREE_OPERAND (exp, 1), 0)
4561 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4562 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4563 {
4564 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4565 temp = gen_reg_rtx (mode);
4566 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4567 jumpif (TREE_OPERAND (exp, 0), op0);
4568 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4569 op1 = op0;
4570 }
4571 else if (temp
4572 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4573 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4574 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4575 TREE_OPERAND (exp, 2), 0)
4576 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4577 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4578 {
4579 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4580 temp = gen_reg_rtx (mode);
4581 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4582 jumpifnot (TREE_OPERAND (exp, 0), op0);
4583 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4584 op1 = op0;
4585 }
4586 else
4587 {
4588 op1 = gen_label_rtx ();
4589 jumpifnot (TREE_OPERAND (exp, 0), op0);
4590 if (temp != 0)
4591 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4592 else
4593 expand_expr (TREE_OPERAND (exp, 1),
4594 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4595 if (cleanups_this_call)
4596 {
4597 sorry ("aggregate value in COND_EXPR");
4598 cleanups_this_call = 0;
4599 }
4600
4601 emit_queue ();
4602 emit_jump_insn (gen_jump (op1));
4603 emit_barrier ();
4604 emit_label (op0);
4605 if (temp != 0)
4606 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4607 else
4608 expand_expr (TREE_OPERAND (exp, 2),
4609 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4610 }
4611
4612 if (cleanups_this_call)
4613 {
4614 sorry ("aggregate value in COND_EXPR");
4615 cleanups_this_call = 0;
4616 }
4617
4618 emit_queue ();
4619 emit_label (op1);
4620 OK_DEFER_POP;
4621 cleanups_this_call = old_cleanups;
4622 return temp;
4623 }
4624
4625 case TARGET_EXPR:
4626 {
4627 /* Something needs to be initialized, but we didn't know
4628 where that thing was when building the tree. For example,
4629 it could be the return value of a function, or a parameter
4630 to a function which is laid out in the stack, or a temporary
4631 variable which must be passed by reference.
4632
4633 We guarantee that the expression will either be constructed
4634 or copied into our original target. */
4635
4636 tree slot = TREE_OPERAND (exp, 0);
4637
4638 if (TREE_CODE (slot) != VAR_DECL)
4639 abort ();
4640
4641 if (target == 0)
4642 {
4643 if (DECL_RTL (slot) != 0)
4644 target = DECL_RTL (slot);
4645 else
4646 {
4647 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4648 /* All temp slots at this level must not conflict. */
4649 preserve_temp_slots (target);
4650 DECL_RTL (slot) = target;
4651 }
4652
4653#if 0
4654 /* Since SLOT is not known to the called function
4655 to belong to its stack frame, we must build an explicit
4656 cleanup. This case occurs when we must build up a reference
4657 to pass the reference as an argument. In this case,
4658 it is very likely that such a reference need not be
4659 built here. */
4660
4661 if (TREE_OPERAND (exp, 2) == 0)
4662 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4663 if (TREE_OPERAND (exp, 2))
4664 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4665 cleanups_this_call);
4666#endif
4667 }
4668 else
4669 {
4670 /* This case does occur, when expanding a parameter which
4671 needs to be constructed on the stack. The target
4672 is the actual stack address that we want to initialize.
4673 The function we call will perform the cleanup in this case. */
4674
4675 DECL_RTL (slot) = target;
4676 }
4677
4678 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4679 }
4680
4681 case INIT_EXPR:
4682 {
4683 tree lhs = TREE_OPERAND (exp, 0);
4684 tree rhs = TREE_OPERAND (exp, 1);
4685 tree noncopied_parts = 0;
4686 tree lhs_type = TREE_TYPE (lhs);
4687
4688 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4689 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4690 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4691 TYPE_NONCOPIED_PARTS (lhs_type));
4692 while (noncopied_parts != 0)
4693 {
4694 expand_assignment (TREE_VALUE (noncopied_parts),
4695 TREE_PURPOSE (noncopied_parts), 0, 0);
4696 noncopied_parts = TREE_CHAIN (noncopied_parts);
4697 }
4698 return temp;
4699 }
4700
4701 case MODIFY_EXPR:
4702 {
4703 /* If lhs is complex, expand calls in rhs before computing it.
4704 That's so we don't compute a pointer and save it over a call.
4705 If lhs is simple, compute it first so we can give it as a
4706 target if the rhs is just a call. This avoids an extra temp and copy
4707 and that prevents a partial-subsumption which makes bad code.
4708 Actually we could treat component_ref's of vars like vars. */
4709
4710 tree lhs = TREE_OPERAND (exp, 0);
4711 tree rhs = TREE_OPERAND (exp, 1);
4712 tree noncopied_parts = 0;
4713 tree lhs_type = TREE_TYPE (lhs);
4714
4715 temp = 0;
4716
4717 if (TREE_CODE (lhs) != VAR_DECL
4718 && TREE_CODE (lhs) != RESULT_DECL
4719 && TREE_CODE (lhs) != PARM_DECL)
4720 preexpand_calls (exp);
4721
4722 /* Check for |= or &= of a bitfield of size one into another bitfield
4723 of size 1. In this case, (unless we need the result of the
4724 assignment) we can do this more efficiently with a
4725 test followed by an assignment, if necessary.
4726
4727 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4728 things change so we do, this code should be enhanced to
4729 support it. */
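 /* Illustrative example (an addition, not original text): for
 "struct s { unsigned f : 1; } x, y; ... x.f |= y.f;" with the
 result unused, we can test y.f, skip the store when it is 0, and
 otherwise store 1 into x.f, instead of extracting both bits and
 inserting their OR. */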
4730 if (ignore
4731 && TREE_CODE (lhs) == COMPONENT_REF
4732 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4733 || TREE_CODE (rhs) == BIT_AND_EXPR)
4734 && TREE_OPERAND (rhs, 0) == lhs
4735 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4736 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4737 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4738 {
4739 rtx label = gen_label_rtx ();
4740
4741 do_jump (TREE_OPERAND (rhs, 1),
4742 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4743 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4744 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4745 (TREE_CODE (rhs) == BIT_IOR_EXPR
4746 ? integer_one_node
4747 : integer_zero_node)),
4748 0, 0);
4749 do_pending_stack_adjust ();
4750 emit_label (label);
4751 return const0_rtx;
4752 }
4753
4754 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4755 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4756 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4757 TYPE_NONCOPIED_PARTS (lhs_type));
4758
4759 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4760 while (noncopied_parts != 0)
4761 {
4762 expand_assignment (TREE_PURPOSE (noncopied_parts),
4763 TREE_VALUE (noncopied_parts), 0, 0);
4764 noncopied_parts = TREE_CHAIN (noncopied_parts);
4765 }
4766 return temp;
4767 }
4768
4769 case PREINCREMENT_EXPR:
4770 case PREDECREMENT_EXPR:
4771 return expand_increment (exp, 0);
4772
4773 case POSTINCREMENT_EXPR:
4774 case POSTDECREMENT_EXPR:
4775 /* Faster to treat as pre-increment if result is not used. */
4776 return expand_increment (exp, ! ignore);
4777
4778 case ADDR_EXPR:
4779 /* Are we taking the address of a nested function? */
4780 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4781 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4782 {
4783 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4784 op0 = force_operand (op0, target);
4785 }
4786 else
4787 {
4788 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4789 (modifier == EXPAND_INITIALIZER
4790 ? modifier : EXPAND_CONST_ADDRESS));
4791 if (GET_CODE (op0) != MEM)
4792 abort ();
4793
4794 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4795 return XEXP (op0, 0);
4796 op0 = force_operand (XEXP (op0, 0), target);
4797 }
4798 if (flag_force_addr && GET_CODE (op0) != REG)
4799 return force_reg (Pmode, op0);
4800 return op0;
4801
4802 case ENTRY_VALUE_EXPR:
4803 abort ();
4804
4805 case ERROR_MARK:
4806 return const0_rtx;
4807
4808 default:
4809 return (*lang_expand_expr) (exp, target, tmode, modifier);
4810 }
4811
4812 /* Here to do an ordinary binary operator, generating an instruction
4813 from the optab already placed in `this_optab'. */
4814 binop:
4815 preexpand_calls (exp);
4816 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4817 subtarget = 0;
4818 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4819 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4820 binop2:
4821 temp = expand_binop (mode, this_optab, op0, op1, target,
4822 unsignedp, OPTAB_LIB_WIDEN);
4823 if (temp == 0)
4824 abort ();
4825 return temp;
4826}
4827\f
4828/* Return the alignment in bits of EXP, a pointer valued expression.
4829 But don't return more than MAX_ALIGN no matter what.
4830 The alignment returned is, by default, the alignment of the thing that
4831 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4832
4833 Otherwise, look at the expression to see if we can do better, i.e., if the
4834 expression is actually pointing at an object whose alignment is tighter. */
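 /* Illustrative example (an addition, not original text): for
 "double buf[4];" with 64-bit DECL_ALIGN, "&buf[0]" yields 64 via
 the ADDR_EXPR case, while "(char *) buf + 2" goes through the
 PLUS_EXPR case, which halves MAX_ALIGN until the 2-byte offset
 is a multiple of it, leaving only 16-bit alignment. */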
4835
4836static int
4837get_pointer_alignment (exp, max_align)
4838 tree exp;
4839 unsigned max_align;
4840{
4841 unsigned align, inner;
4842
4843 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4844 return 0;
4845
4846 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4847 align = MIN (align, max_align);
4848
4849 while (1)
4850 {
4851 switch (TREE_CODE (exp))
4852 {
4853 case NOP_EXPR:
4854 case CONVERT_EXPR:
4855 case NON_LVALUE_EXPR:
4856 exp = TREE_OPERAND (exp, 0);
4857 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4858 return align;
4859 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4860 inner = MIN (inner, max_align);
4861 align = MAX (align, inner);
4862 break;
4863
4864 case PLUS_EXPR:
4865 /* If sum of pointer + int, restrict our maximum alignment to that
4866 imposed by the integer. If not, we can't do any better than
4867 ALIGN. */
4868 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4869 return align;
4870
4871 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4872 & (max_align - 1))
4873 != 0)
4874 max_align >>= 1;
4875
4876 exp = TREE_OPERAND (exp, 0);
4877 break;
4878
4879 case ADDR_EXPR:
4880 /* See what we are pointing at and look at its alignment. */
4881 exp = TREE_OPERAND (exp, 0);
4882 if (TREE_CODE (exp) == FUNCTION_DECL)
4883 align = MAX (align, FUNCTION_BOUNDARY);
4884 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4885 align = MAX (align, DECL_ALIGN (exp));
4886#ifdef CONSTANT_ALIGNMENT
4887 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4888 align = CONSTANT_ALIGNMENT (exp, align);
4889#endif
4890 return MIN (align, max_align);
4891
4892 default:
4893 return align;
4894 }
4895 }
4896}
4897\f
4898/* Return the tree node and offset if a given argument corresponds to
4899 a string constant. */
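 /* Illustrative example (an addition, not original text): for an
 argument representing "hello" + N this returns the STRING_CST
 "hello" and sets *PTR_OFFSET to N; for plain "hello" the offset
 is integer_zero_node. */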
4900
4901static tree
4902string_constant (arg, ptr_offset)
4903 tree arg;
4904 tree *ptr_offset;
4905{
4906 STRIP_NOPS (arg);
4907
4908 if (TREE_CODE (arg) == ADDR_EXPR
4909 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4910 {
4911 *ptr_offset = integer_zero_node;
4912 return TREE_OPERAND (arg, 0);
4913 }
4914 else if (TREE_CODE (arg) == PLUS_EXPR)
4915 {
4916 tree arg0 = TREE_OPERAND (arg, 0);
4917 tree arg1 = TREE_OPERAND (arg, 1);
4918
4919 STRIP_NOPS (arg0);
4920 STRIP_NOPS (arg1);
4921
4922 if (TREE_CODE (arg0) == ADDR_EXPR
4923 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4924 {
4925 *ptr_offset = arg1;
4926 return TREE_OPERAND (arg0, 0);
4927 }
4928 else if (TREE_CODE (arg1) == ADDR_EXPR
4929 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4930 {
4931 *ptr_offset = arg0;
4932 return TREE_OPERAND (arg1, 0);
4933 }
4934 }
4935
4936 return 0;
4937}
4938
4939/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4940 way, because it could contain a zero byte in the middle.
4941 TREE_STRING_LENGTH is the size of the character array, not the string.
4942
4943 Unfortunately, string_constant can't access the values of const char
4944 arrays with initializers, so neither can we do so here. */
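 /* Illustrative examples (an addition, not original text): c_strlen
 of "hello" is 5 and of &"hello"[2] is 3; for "foo\0bar" with a
 non-constant offset it returns 0, and the caller must fall back
 to a runtime strlen, since the answer depends on which side of
 the embedded null the offset lands. */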
4945
4946static tree
4947c_strlen (src)
4948 tree src;
4949{
4950 tree offset_node;
4951 int offset, max;
4952 char *ptr;
4953
4954 src = string_constant (src, &offset_node);
4955 if (src == 0)
4956 return 0;
4957 max = TREE_STRING_LENGTH (src);
4958 ptr = TREE_STRING_POINTER (src);
4959 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4960 {
4961 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4962 compute the offset to the following null if we don't know where to
4963 start searching for it. */
4964 int i;
4965 for (i = 0; i < max; i++)
4966 if (ptr[i] == 0)
4967 return 0;
4968 /* We don't know the starting offset, but we do know that the string
4969 has no internal zero bytes. We can assume that the offset falls
4970 within the bounds of the string; otherwise, the programmer deserves
4971 what he gets. Subtract the offset from the length of the string,
4972 and return that. */
4973 /* This would perhaps not be valid if we were dealing with named
4974 arrays in addition to literal string constants. */
4975 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4976 }
4977
4978 /* We have a known offset into the string. Start searching there for
4979 a null character. */
4980 if (offset_node == 0)
4981 offset = 0;
4982 else
4983 {
4984 /* Did we get a long long offset? If so, punt. */
4985 if (TREE_INT_CST_HIGH (offset_node) != 0)
4986 return 0;
4987 offset = TREE_INT_CST_LOW (offset_node);
4988 }
4989 /* If the offset is known to be out of bounds, warn, and call strlen at
4990 runtime. */
4991 if (offset < 0 || offset > max)
4992 {
4993 warning ("offset outside bounds of constant string");
4994 return 0;
4995 }
4996 /* Use strlen to search for the first zero byte. Since any strings
4997 constructed with build_string will have nulls appended, we win even
4998 if we get handed something like (char[4])"abcd".
4999
5000 Since OFFSET is our starting index into the string, no further
5001 calculation is needed. */
5002 return size_int (strlen (ptr + offset));
5003}
5004\f
5005/* Expand an expression EXP that calls a built-in function,
5006 with result going to TARGET if that's convenient
5007 (and in mode MODE if that's convenient).
5008 SUBTARGET may be used as the target for computing one of EXP's operands.
5009 IGNORE is nonzero if the value is to be ignored. */
5010
5011static rtx
5012expand_builtin (exp, target, subtarget, mode, ignore)
5013 tree exp;
5014 rtx target;
5015 rtx subtarget;
5016 enum machine_mode mode;
5017 int ignore;
5018{
5019 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5020 tree arglist = TREE_OPERAND (exp, 1);
5021 rtx op0;
5022 rtx lab1, insns;
5023 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5024
5025 switch (DECL_FUNCTION_CODE (fndecl))
5026 {
5027 case BUILT_IN_ABS:
5028 case BUILT_IN_LABS:
5029 case BUILT_IN_FABS:
5030 /* build_function_call changes these into ABS_EXPR. */
5031 abort ();
5032
5033 case BUILT_IN_FSQRT:
5034 /* If not optimizing, call the library function. */
5035 if (! optimize)
5036 break;
5037
5038 if (arglist == 0
5039 /* Arg could be wrong type if user redeclared this fcn wrong. */
5040 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5041 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5042
5043 /* Stabilize and compute the argument. */
5044 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5045 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5046 {
5047 exp = copy_node (exp);
5048 arglist = copy_node (arglist);
5049 TREE_OPERAND (exp, 1) = arglist;
5050 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5051 }
5052 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5053
5054 /* Make a suitable register to place result in. */
5055 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5056
5057 emit_queue ();
5058 start_sequence ();
5059
5060 /* Compute sqrt into TARGET.
5061 Set TARGET to wherever the result comes back. */
5062 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5063 sqrt_optab, op0, target, 0);
5064
5065 /* If we were unable to expand via the builtin, stop the
5066 sequence (without outputting the insns) and break, causing
 5067 a call to the library function. */
e87b4f3f 5068 if (target == 0)
e7c33f54 5069 {
8c8a8e34 5070 end_sequence ();
e7c33f54
RK
5071 break;
5072 }
e87b4f3f 5073
60bac6ea
RS
5074 /* Check the results by default. But if flag_fast_math is turned on,
5075 then assume sqrt will always be called with valid arguments. */
5076
5077 if (! flag_fast_math)
5078 {
5079 /* Don't define the sqrt instructions
5080 if your machine is not IEEE. */
5081 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5082 abort ();
5083
5084 lab1 = gen_label_rtx ();
5085
5086 /* Test the result; if it is NaN, set errno=EDOM because
5087 the argument was not in the domain. */
5088 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5089 emit_jump_insn (gen_beq (lab1));
5090
5091#if TARGET_EDOM
5092 {
5093#ifdef GEN_ERRNO_RTX
5094 rtx errno_rtx = GEN_ERRNO_RTX;
5095#else
5096 rtx errno_rtx
5097 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5098#endif
5099
5100 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5101 }
5102#else
5103 /* We can't set errno=EDOM directly; let the library call do it.
5104 Pop the arguments right away in case the call gets deleted. */
5105 NO_DEFER_POP;
5106 expand_call (exp, target, 0);
5107 OK_DEFER_POP;
5108#endif
5109
5110 emit_label (lab1);
5111 }
e87b4f3f 5112
e7c33f54 5113 /* Output the entire sequence. */
8c8a8e34
JW
5114 insns = get_insns ();
5115 end_sequence ();
5116 emit_insns (insns);
e7c33f54
RK
5117
5118 return target;
5119
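      /* Illustrative sketch (disabled; x and r hypothetical): the
         self-comparison emitted above relies on the IEEE rule that only
         a NaN compares unequal to itself.  In C terms:  */
#if 0
      {
        double r = sqrt (x);      /* NaN when x < 0 */
        if (r != r)               /* true exactly for a NaN result */
          errno = EDOM;           /* argument was outside the domain */
      }
#endif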
bbf6f052
RK
5120 case BUILT_IN_SAVEREGS:
5121 /* Don't do __builtin_saveregs more than once in a function.
5122 Save the result of the first call and reuse it. */
5123 if (saveregs_value != 0)
5124 return saveregs_value;
5125 {
5126 /* When this function is called, it means that registers must be
5127 saved on entry to this function. So we migrate the
5128 call to the first insn of this function. */
5129 rtx temp;
5130 rtx seq;
5131 rtx valreg, saved_valreg;
5132
5133 /* Now really call the function. `expand_call' does not call
5134 expand_builtin, so there is no danger of infinite recursion here. */
5135 start_sequence ();
5136
5137#ifdef EXPAND_BUILTIN_SAVEREGS
5138 /* Do whatever the machine needs done in this case. */
5139 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5140#else
5141 /* The register where the function returns its value
5142 is likely to have something else in it, such as an argument.
5143 So preserve that register around the call. */
5144 if (value_mode != VOIDmode)
5145 {
5146 valreg = hard_libcall_value (value_mode);
5147 saved_valreg = gen_reg_rtx (value_mode);
5148 emit_move_insn (saved_valreg, valreg);
5149 }
5150
5151 /* Generate the call, putting the value in a pseudo. */
5152 temp = expand_call (exp, target, ignore);
5153
5154 if (value_mode != VOIDmode)
5155 emit_move_insn (valreg, saved_valreg);
5156#endif
5157
5158 seq = get_insns ();
5159 end_sequence ();
5160
5161 saveregs_value = temp;
5162
5163 /* This won't work inside a SEQUENCE--it really has to be
5164 at the start of the function. */
5165 if (in_sequence_p ())
5166 {
5167 /* Better to do this than to crash. */
5168 error ("`va_start' used within `({...})'");
5169 return temp;
5170 }
5171
5172 /* Put the sequence after the NOTE that starts the function. */
5173 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5174 return temp;
5175 }
5176
5177 /* __builtin_args_info (N) returns word N of the arg space info
5178 for the current function. The number and meanings of words
 5179 are controlled by the definition of CUMULATIVE_ARGS. */
5180 case BUILT_IN_ARGS_INFO:
5181 {
5182 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5183 int i;
5184 int *word_ptr = (int *) &current_function_args_info;
5185 tree type, elts, result;
5186
5187 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5188 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5189 __FILE__, __LINE__);
5190
5191 if (arglist != 0)
5192 {
5193 tree arg = TREE_VALUE (arglist);
5194 if (TREE_CODE (arg) != INTEGER_CST)
5195 error ("argument of __builtin_args_info must be constant");
5196 else
5197 {
5198 int wordnum = TREE_INT_CST_LOW (arg);
5199
5200 if (wordnum < 0 || wordnum >= nwords)
5201 error ("argument of __builtin_args_info out of range");
5202 else
906c4e36 5203 return GEN_INT (word_ptr[wordnum]);
bbf6f052
RK
5204 }
5205 }
5206 else
5207 error ("missing argument in __builtin_args_info");
5208
5209 return const0_rtx;
5210
5211#if 0
5212 for (i = 0; i < nwords; i++)
5213 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5214
5215 type = build_array_type (integer_type_node,
5216 build_index_type (build_int_2 (nwords, 0)));
5217 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5218 TREE_CONSTANT (result) = 1;
5219 TREE_STATIC (result) = 1;
5220 result = build (INDIRECT_REF, build_pointer_type (type), result);
5221 TREE_CONSTANT (result) = 1;
906c4e36 5222 return expand_expr (result, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5223#endif
5224 }
5225
5226 /* Return the address of the first anonymous stack arg. */
5227 case BUILT_IN_NEXT_ARG:
5228 {
5229 tree fntype = TREE_TYPE (current_function_decl);
5230 if (!(TYPE_ARG_TYPES (fntype) != 0
5231 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5232 != void_type_node)))
5233 {
5234 error ("`va_start' used in function with fixed args");
5235 return const0_rtx;
5236 }
5237 }
5238
5239 return expand_binop (Pmode, add_optab,
5240 current_function_internal_arg_pointer,
5241 current_function_arg_offset_rtx,
906c4e36 5242 NULL_RTX, 0, OPTAB_LIB_WIDEN);
bbf6f052
RK
5243
5244 case BUILT_IN_CLASSIFY_TYPE:
5245 if (arglist != 0)
5246 {
5247 tree type = TREE_TYPE (TREE_VALUE (arglist));
5248 enum tree_code code = TREE_CODE (type);
5249 if (code == VOID_TYPE)
906c4e36 5250 return GEN_INT (void_type_class);
bbf6f052 5251 if (code == INTEGER_TYPE)
906c4e36 5252 return GEN_INT (integer_type_class);
bbf6f052 5253 if (code == CHAR_TYPE)
906c4e36 5254 return GEN_INT (char_type_class);
bbf6f052 5255 if (code == ENUMERAL_TYPE)
906c4e36 5256 return GEN_INT (enumeral_type_class);
bbf6f052 5257 if (code == BOOLEAN_TYPE)
906c4e36 5258 return GEN_INT (boolean_type_class);
bbf6f052 5259 if (code == POINTER_TYPE)
906c4e36 5260 return GEN_INT (pointer_type_class);
bbf6f052 5261 if (code == REFERENCE_TYPE)
906c4e36 5262 return GEN_INT (reference_type_class);
bbf6f052 5263 if (code == OFFSET_TYPE)
906c4e36 5264 return GEN_INT (offset_type_class);
bbf6f052 5265 if (code == REAL_TYPE)
906c4e36 5266 return GEN_INT (real_type_class);
bbf6f052 5267 if (code == COMPLEX_TYPE)
906c4e36 5268 return GEN_INT (complex_type_class);
bbf6f052 5269 if (code == FUNCTION_TYPE)
906c4e36 5270 return GEN_INT (function_type_class);
bbf6f052 5271 if (code == METHOD_TYPE)
906c4e36 5272 return GEN_INT (method_type_class);
bbf6f052 5273 if (code == RECORD_TYPE)
906c4e36 5274 return GEN_INT (record_type_class);
bbf6f052 5275 if (code == UNION_TYPE)
906c4e36 5276 return GEN_INT (union_type_class);
bbf6f052 5277 if (code == ARRAY_TYPE)
906c4e36 5278 return GEN_INT (array_type_class);
bbf6f052 5279 if (code == STRING_TYPE)
906c4e36 5280 return GEN_INT (string_type_class);
bbf6f052 5281 if (code == SET_TYPE)
906c4e36 5282 return GEN_INT (set_type_class);
bbf6f052 5283 if (code == FILE_TYPE)
906c4e36 5284 return GEN_INT (file_type_class);
bbf6f052 5285 if (code == LANG_TYPE)
906c4e36 5286 return GEN_INT (lang_type_class);
bbf6f052 5287 }
906c4e36 5288 return GEN_INT (no_type_class);
bbf6f052
RK
5289
5290 case BUILT_IN_CONSTANT_P:
5291 if (arglist == 0)
5292 return const0_rtx;
5293 else
cda0ec81 5294 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5295 ? const1_rtx : const0_rtx);
5296
5297 case BUILT_IN_FRAME_ADDRESS:
5298 /* The argument must be a nonnegative integer constant.
5299 It counts the number of frames to scan up the stack.
5300 The value is the address of that frame. */
5301 case BUILT_IN_RETURN_ADDRESS:
5302 /* The argument must be a nonnegative integer constant.
5303 It counts the number of frames to scan up the stack.
5304 The value is the return address saved in that frame. */
5305 if (arglist == 0)
5306 /* Warning about missing arg was already issued. */
5307 return const0_rtx;
5308 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5309 {
5310 error ("invalid arg to __builtin_return_address");
5311 return const0_rtx;
5312 }
5313 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5314 {
5315 error ("invalid arg to __builtin_return_address");
5316 return const0_rtx;
5317 }
5318 else
5319 {
5320 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5321 rtx tem = frame_pointer_rtx;
5322 int i;
5323
5324 /* Scan back COUNT frames to the specified frame. */
5325 for (i = 0; i < count; i++)
5326 {
5327 /* Assume the dynamic chain pointer is in the word that
5328 the frame address points to, unless otherwise specified. */
5329#ifdef DYNAMIC_CHAIN_ADDRESS
5330 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5331#endif
5332 tem = memory_address (Pmode, tem);
5333 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5334 }
5335
5336 /* For __builtin_frame_address, return what we've got. */
5337 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5338 return tem;
5339
5340 /* For __builtin_return_address,
5341 Get the return address from that frame. */
5342#ifdef RETURN_ADDR_RTX
5343 return RETURN_ADDR_RTX (count, tem);
5344#else
5345 tem = memory_address (Pmode,
5346 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5347 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5348#endif
5349 }
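      /* Illustrative sketch (disabled; fp and count hypothetical): the
         frame walk above, assuming each frame's first word holds the
         previous frame pointer.  */
#if 0
      {
        void *fp = frame_pointer;       /* hypothetical starting frame */
        int i;
        for (i = 0; i < count; i++)
          fp = *(void **) fp;           /* follow the dynamic chain */
        /* FP now addresses the COUNT'th enclosing frame; absent
           RETURN_ADDR_RTX, the saved return address is assumed to sit
           one word beyond it.  */
      }
#endif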
5350
5351 case BUILT_IN_ALLOCA:
5352 if (arglist == 0
5353 /* Arg could be non-integer if user redeclared this fcn wrong. */
5354 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5355 return const0_rtx;
5356 current_function_calls_alloca = 1;
5357 /* Compute the argument. */
906c4e36 5358 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5359
5360 /* Allocate the desired space. */
8c8a8e34 5361 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
bbf6f052
RK
5362
5363 /* Record the new stack level for nonlocal gotos. */
6dc42e49 5364 if (nonlocal_goto_handler_slot != 0)
906c4e36 5365 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
bbf6f052
RK
5366 return target;
5367
5368 case BUILT_IN_FFS:
5369 /* If not optimizing, call the library function. */
5370 if (!optimize)
5371 break;
5372
5373 if (arglist == 0
5374 /* Arg could be non-integer if user redeclared this fcn wrong. */
5375 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5376 return const0_rtx;
5377
5378 /* Compute the argument. */
5379 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5380 /* Compute ffs, into TARGET if possible.
5381 Set TARGET to wherever the result comes back. */
5382 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5383 ffs_optab, op0, target, 1);
5384 if (target == 0)
5385 abort ();
5386 return target;
5387
5388 case BUILT_IN_STRLEN:
5389 /* If not optimizing, call the library function. */
5390 if (!optimize)
5391 break;
5392
5393 if (arglist == 0
5394 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5395 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5396 return const0_rtx;
5397 else
5398 {
e7c33f54
RK
5399 tree src = TREE_VALUE (arglist);
5400 tree len = c_strlen (src);
bbf6f052 5401
e7c33f54
RK
5402 int align
5403 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5404
5405 rtx result, src_rtx, char_rtx;
5406 enum machine_mode insn_mode = value_mode, char_mode;
5407 enum insn_code icode;
5408
5409 /* If the length is known, just return it. */
5410 if (len != 0)
5411 return expand_expr (len, target, mode, 0);
5412
5413 /* If SRC is not a pointer type, don't do this operation inline. */
5414 if (align == 0)
5415 break;
5416
5417 /* Call a function if we can't compute strlen in the right mode. */
5418
5419 while (insn_mode != VOIDmode)
5420 {
5421 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5422 if (icode != CODE_FOR_nothing)
5423 break;
5424
5425 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5426 }
5427 if (insn_mode == VOIDmode)
bbf6f052 5428 break;
e7c33f54
RK
5429
5430 /* Make a place to write the result of the instruction. */
5431 result = target;
5432 if (! (result != 0
5433 && GET_CODE (result) == REG
5434 && GET_MODE (result) == insn_mode
5435 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5436 result = gen_reg_rtx (insn_mode);
5437
4d613828 5438 /* Make sure the operands are acceptable to the predicates. */
e7c33f54 5439
4d613828 5440 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5441 result = gen_reg_rtx (insn_mode);
5442
5443 src_rtx = memory_address (BLKmode,
906c4e36 5444 expand_expr (src, NULL_RTX, Pmode,
e7c33f54 5445 EXPAND_NORMAL));
4d613828 5446 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5447 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5448
5449 char_rtx = const0_rtx;
4d613828
RS
5450 char_mode = insn_operand_mode[(int)icode][2];
5451 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5452 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5453
5454 emit_insn (GEN_FCN (icode) (result,
5455 gen_rtx (MEM, BLKmode, src_rtx),
906c4e36 5456 char_rtx, GEN_INT (align)));
e7c33f54
RK
5457
5458 /* Return the value in the proper mode for this function. */
5459 if (GET_MODE (result) == value_mode)
5460 return result;
5461 else if (target != 0)
5462 {
5463 convert_move (target, result, 0);
5464 return target;
5465 }
5466 else
5467 return convert_to_mode (value_mode, result, 0);
bbf6f052
RK
5468 }
5469
5470 case BUILT_IN_STRCPY:
5471 /* If not optimizing, call the library function. */
5472 if (!optimize)
5473 break;
5474
5475 if (arglist == 0
5476 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5477 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5478 || TREE_CHAIN (arglist) == 0
5479 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5480 return const0_rtx;
5481 else
5482 {
5483 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5484
5485 if (len == 0)
5486 break;
5487
5488 len = size_binop (PLUS_EXPR, len, integer_one_node);
5489
906c4e36 5490 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
5491 }
5492
5493 /* Drops in. */
5494 case BUILT_IN_MEMCPY:
5495 /* If not optimizing, call the library function. */
5496 if (!optimize)
5497 break;
5498
5499 if (arglist == 0
5500 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5501 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5502 || TREE_CHAIN (arglist) == 0
5503 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5504 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5505 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5506 return const0_rtx;
5507 else
5508 {
5509 tree dest = TREE_VALUE (arglist);
5510 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5511 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5512
5513 int src_align
5514 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5515 int dest_align
5516 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5517 rtx dest_rtx;
5518
5519 /* If either SRC or DEST is not a pointer type, don't do
5520 this operation in-line. */
5521 if (src_align == 0 || dest_align == 0)
5522 {
5523 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5524 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5525 break;
5526 }
5527
906c4e36 5528 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
bbf6f052
RK
5529
5530 /* Copy word part most expediently. */
5531 emit_block_move (gen_rtx (MEM, BLKmode,
5532 memory_address (BLKmode, dest_rtx)),
5533 gen_rtx (MEM, BLKmode,
5534 memory_address (BLKmode,
5535 expand_expr (src, NULL_RTX,
5536 Pmode,
bbf6f052 5537 EXPAND_NORMAL))),
906c4e36 5538 expand_expr (len, NULL_RTX, VOIDmode, 0),
5539 MIN (src_align, dest_align));
5540 return dest_rtx;
5541 }
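      /* Illustrative sketch (disabled; dest hypothetical): the strcpy
         reduction above.  With a source of known constant length LEN,
         strcpy is exactly a memcpy of LEN + 1 bytes, the extra byte
         copying the terminating null.  */
#if 0
      strcpy (dest, "abc");             /* becomes ... */
      memcpy (dest, "abc", 3 + 1);      /* ... this */
#endif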
5542
5543/* These comparison functions need an instruction that returns an actual
5544 index. An ordinary compare that just sets the condition codes
5545 is not enough. */
5546#ifdef HAVE_cmpstrsi
5547 case BUILT_IN_STRCMP:
5548 /* If not optimizing, call the library function. */
5549 if (!optimize)
5550 break;
5551
5552 if (arglist == 0
5553 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5554 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5555 || TREE_CHAIN (arglist) == 0
5556 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5557 return const0_rtx;
5558 else if (!HAVE_cmpstrsi)
5559 break;
5560 {
5561 tree arg1 = TREE_VALUE (arglist);
5562 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5563 tree offset;
5564 tree len, len2;
5565
5566 len = c_strlen (arg1);
5567 if (len)
5568 len = size_binop (PLUS_EXPR, integer_one_node, len);
5569 len2 = c_strlen (arg2);
5570 if (len2)
5571 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5572
5573 /* If we don't have a constant length for the first, use the length
5574 of the second, if we know it. We don't require a constant for
5575 this case; some cost analysis could be done if both are available
5576 but neither is constant. For now, assume they're equally cheap.
5577
5578 If both strings have constant lengths, use the smaller. This
 5579 could arise if optimization results in strcmp being called with
5580 two fixed strings, or if the code was machine-generated. We should
5581 add some code to the `memcmp' handler below to deal with such
5582 situations, someday. */
5583 if (!len || TREE_CODE (len) != INTEGER_CST)
5584 {
5585 if (len2)
5586 len = len2;
5587 else if (len == 0)
5588 break;
5589 }
5590 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5591 {
5592 if (tree_int_cst_lt (len2, len))
5593 len = len2;
5594 }
5595
906c4e36 5596 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
5597 }
5598
5599 /* Drops in. */
5600 case BUILT_IN_MEMCMP:
5601 /* If not optimizing, call the library function. */
5602 if (!optimize)
5603 break;
5604
5605 if (arglist == 0
5606 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5607 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5608 || TREE_CHAIN (arglist) == 0
5609 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5610 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5611 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5612 return const0_rtx;
5613 else if (!HAVE_cmpstrsi)
5614 break;
5615 {
5616 tree arg1 = TREE_VALUE (arglist);
5617 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5618 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5619 rtx result;
5620
5621 int arg1_align
5622 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5623 int arg2_align
5624 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5625 enum machine_mode insn_mode
5626 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5627
5628 /* If we don't have POINTER_TYPE, call the function. */
5629 if (arg1_align == 0 || arg2_align == 0)
5630 {
5631 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5632 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5633 break;
5634 }
5635
5636 /* Make a place to write the result of the instruction. */
5637 result = target;
5638 if (! (result != 0
5639 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5640 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5641 result = gen_reg_rtx (insn_mode);
5642
5643 emit_insn (gen_cmpstrsi (result,
5644 gen_rtx (MEM, BLKmode,
5645 expand_expr (arg1, NULL_RTX, Pmode,
5646 EXPAND_NORMAL)),
bbf6f052 5647 gen_rtx (MEM, BLKmode,
5648 expand_expr (arg2, NULL_RTX, Pmode,
5649 EXPAND_NORMAL)),
5650 expand_expr (len, NULL_RTX, VOIDmode, 0),
5651 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052
RK
5652
5653 /* Return the value in the proper mode for this function. */
5654 mode = TYPE_MODE (TREE_TYPE (exp));
5655 if (GET_MODE (result) == mode)
5656 return result;
5657 else if (target != 0)
5658 {
5659 convert_move (target, result, 0);
5660 return target;
5661 }
5662 else
5663 return convert_to_mode (mode, result, 0);
5664 }
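      /* Illustrative sketch (disabled; r hypothetical): the length
         selection above.  strcmp need examine at most
         min (strlen (a), strlen (b)) + 1 bytes, since the shorter
         string's terminating null decides any remaining difference,
         so it can be treated as a memcmp of that many bytes:  */
#if 0
      r = strcmp ("abcd", "ab");            /* becomes ... */
      r = memcmp ("abcd", "ab", 2 + 1);     /* ... this: 3 bytes suffice */
#endif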
5665#else
5666 case BUILT_IN_STRCMP:
5667 case BUILT_IN_MEMCMP:
5668 break;
5669#endif
5670
5671 default: /* just do library call, if unknown builtin */
5672 error ("built-in function %s not currently supported",
5673 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5674 }
5675
5676 /* The switch statement above can drop through to cause the function
5677 to be called normally. */
5678
5679 return expand_call (exp, target, ignore);
5680}
5681\f
5682/* Expand code for a post- or pre- increment or decrement
5683 and return the RTX for the result.
5684 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5685
5686static rtx
5687expand_increment (exp, post)
5688 register tree exp;
5689 int post;
5690{
5691 register rtx op0, op1;
5692 register rtx temp, value;
5693 register tree incremented = TREE_OPERAND (exp, 0);
5694 optab this_optab = add_optab;
5695 int icode;
5696 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5697 int op0_is_copy = 0;
5698
5699 /* Stabilize any component ref that might need to be
5700 evaluated more than once below. */
5701 if (TREE_CODE (incremented) == BIT_FIELD_REF
5702 || (TREE_CODE (incremented) == COMPONENT_REF
5703 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5704 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5705 incremented = stabilize_reference (incremented);
5706
5707 /* Compute the operands as RTX.
5708 Note whether OP0 is the actual lvalue or a copy of it:
5709 I believe it is a copy iff it is a register or subreg
5710 and insns were generated in computing it. */
bbf6f052 5711 temp = get_last_insn ();
906c4e36 5712 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
94a58076
RS
5713 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5714 && temp != get_last_insn ());
906c4e36 5715 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5716
5717 /* Decide whether incrementing or decrementing. */
5718 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5719 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5720 this_optab = sub_optab;
5721
5722 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5723 then we cannot just increment OP0. We must
5724 therefore contrive to increment the original value.
5725 Then we can return OP0 since it is a copy of the old value. */
5726 if (op0_is_copy)
5727 {
5728 /* This is the easiest way to increment the value wherever it is.
5729 Problems with multiple evaluation of INCREMENTED
5730 are prevented because either (1) it is a component_ref,
5731 in which case it was stabilized above, or (2) it is an array_ref
5732 with constant index in an array in a register, which is
5733 safe to reevaluate. */
5734 tree newexp = build ((this_optab == add_optab
5735 ? PLUS_EXPR : MINUS_EXPR),
5736 TREE_TYPE (exp),
5737 incremented,
5738 TREE_OPERAND (exp, 1));
5739 temp = expand_assignment (incremented, newexp, ! post, 0);
5740 return post ? op0 : temp;
5741 }
5742
5743 /* Convert decrement by a constant into a negative increment. */
5744 if (this_optab == sub_optab
5745 && GET_CODE (op1) == CONST_INT)
5746 {
906c4e36 5747 op1 = GEN_INT (- INTVAL (op1));
bbf6f052
RK
5748 this_optab = add_optab;
5749 }
5750
5751 if (post)
5752 {
5753 /* We have a true reference to the value in OP0.
5754 If there is an insn to add or subtract in this mode, queue it. */
5755
5756#if 0 /* Turned off to avoid making extra insn for indexed memref. */
5757 op0 = stabilize (op0);
5758#endif
5759
5760 icode = (int) this_optab->handlers[(int) mode].insn_code;
5761 if (icode != (int) CODE_FOR_nothing
5762 /* Make sure that OP0 is valid for operands 0 and 1
5763 of the insn we want to queue. */
5764 && (*insn_operand_predicate[icode][0]) (op0, mode)
5765 && (*insn_operand_predicate[icode][1]) (op0, mode))
5766 {
5767 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5768 op1 = force_reg (mode, op1);
5769
5770 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5771 }
5772 }
5773
5774 /* Preincrement, or we can't increment with one simple insn. */
5775 if (post)
5776 /* Save a copy of the value before inc or dec, to return it later. */
5777 temp = value = copy_to_reg (op0);
5778 else
5779 /* Arrange to return the incremented value. */
5780 /* Copy the rtx because expand_binop will protect from the queue,
5781 and the results of that would be invalid for us to return
5782 if our caller does emit_queue before using our result. */
5783 temp = copy_rtx (value = op0);
5784
5785 /* Increment however we can. */
5786 op1 = expand_binop (mode, this_optab, value, op1, op0,
5787 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5788 /* Make sure the value is stored into OP0. */
5789 if (op1 != op0)
5790 emit_move_insn (op0, op1);
5791
5792 return temp;
5793}
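/* Illustrative sketch, not compiler code: the postincrement strategy
   used above when no single add-to-memory insn applies.  */

static int
postincrement_sketch (p)
     int *p;
{
  int old = *p;         /* temp = value = copy_to_reg (op0) */
  *p = old + 1;         /* increment however we can, store into OP0 */
  return old;           /* return temp: the value before the increment */
}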
5794\f
5795/* Expand all function calls contained within EXP, innermost ones first.
5796 But don't look within expressions that have sequence points.
5797 For each CALL_EXPR, record the rtx for its value
5798 in the CALL_EXPR_RTL field. */
5799
5800static void
5801preexpand_calls (exp)
5802 tree exp;
5803{
5804 register int nops, i;
5805 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5806
5807 if (! do_preexpand_calls)
5808 return;
5809
5810 /* Only expressions and references can contain calls. */
5811
5812 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5813 return;
5814
5815 switch (TREE_CODE (exp))
5816 {
5817 case CALL_EXPR:
5818 /* Do nothing if already expanded. */
5819 if (CALL_EXPR_RTL (exp) != 0)
5820 return;
5821
5822 /* Do nothing to built-in functions. */
5823 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5824 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5825 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
906c4e36 5826 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
bbf6f052
RK
5827 return;
5828
5829 case COMPOUND_EXPR:
5830 case COND_EXPR:
5831 case TRUTH_ANDIF_EXPR:
5832 case TRUTH_ORIF_EXPR:
5833 /* If we find one of these, then we can be sure
5834 the adjust will be done for it (since it makes jumps).
5835 Do it now, so that if this is inside an argument
5836 of a function, we don't get the stack adjustment
5837 after some other args have already been pushed. */
5838 do_pending_stack_adjust ();
5839 return;
5840
5841 case BLOCK:
5842 case RTL_EXPR:
5843 case WITH_CLEANUP_EXPR:
5844 return;
5845
5846 case SAVE_EXPR:
5847 if (SAVE_EXPR_RTL (exp) != 0)
5848 return;
5849 }
5850
5851 nops = tree_code_length[(int) TREE_CODE (exp)];
5852 for (i = 0; i < nops; i++)
5853 if (TREE_OPERAND (exp, i) != 0)
5854 {
5855 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5856 if (type == 'e' || type == '<' || type == '1' || type == '2'
5857 || type == 'r')
5858 preexpand_calls (TREE_OPERAND (exp, i));
5859 }
5860}
5861\f
5862/* At the start of a function, record that we have no previously-pushed
5863 arguments waiting to be popped. */
5864
5865void
5866init_pending_stack_adjust ()
5867{
5868 pending_stack_adjust = 0;
5869}
5870
5871/* When exiting from function, if safe, clear out any pending stack adjust
5872 so the adjustment won't get done. */
5873
5874void
5875clear_pending_stack_adjust ()
5876{
5877#ifdef EXIT_IGNORE_STACK
5878 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
81feeecb 5879 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
bbf6f052
RK
5880 && ! flag_inline_functions)
5881 pending_stack_adjust = 0;
5882#endif
5883}
5884
5885/* Pop any previously-pushed arguments that have not been popped yet. */
5886
5887void
5888do_pending_stack_adjust ()
5889{
5890 if (inhibit_defer_pop == 0)
5891 {
5892 if (pending_stack_adjust != 0)
906c4e36 5893 adjust_stack (GEN_INT (pending_stack_adjust));
bbf6f052
RK
5894 pending_stack_adjust = 0;
5895 }
5896}
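/* Illustrative sketch, not compiler code: the deferral protocol the
   routines above implement.  Callers accumulate a byte count instead
   of popping arguments after every call; one combined pop is emitted
   later.  All names are hypothetical.  */

static int pending_pop_bytes_sketch;

static void
defer_pop_sketch (nbytes)
     int nbytes;
{
  pending_pop_bytes_sketch += nbytes;   /* no insn emitted yet */
}

static void
flush_pops_sketch ()
{
  if (pending_pop_bytes_sketch != 0)
    {
      /* The real code emits adjust_stack (GEN_INT (...)) here.  */
      pending_pop_bytes_sketch = 0;
    }
}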
5897
5898/* Expand all cleanups up to OLD_CLEANUPS.
5899 Needed here, and also for language-dependent calls. */
5900
5901void
5902expand_cleanups_to (old_cleanups)
5903 tree old_cleanups;
5904{
5905 while (cleanups_this_call != old_cleanups)
5906 {
906c4e36 5907 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5908 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5909 }
5910}
5911\f
5912/* Expand conditional expressions. */
5913
5914/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5915 LABEL is an rtx of code CODE_LABEL, in this function and all the
5916 functions here. */
5917
5918void
5919jumpifnot (exp, label)
5920 tree exp;
5921 rtx label;
5922{
906c4e36 5923 do_jump (exp, label, NULL_RTX);
bbf6f052
RK
5924}
5925
5926/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5927
5928void
5929jumpif (exp, label)
5930 tree exp;
5931 rtx label;
5932{
906c4e36 5933 do_jump (exp, NULL_RTX, label);
bbf6f052
RK
5934}
5935
5936/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5937 the result is zero, or IF_TRUE_LABEL if the result is one.
5938 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5939 meaning fall through in that case.
5940
5941 do_jump always does any pending stack adjust except when it does not
5942 actually perform a jump. An example where there is no jump
5943 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5944
bbf6f052
RK
5945 This function is responsible for optimizing cases such as
5946 &&, || and comparison operators in EXP. */
5947
5948void
5949do_jump (exp, if_false_label, if_true_label)
5950 tree exp;
5951 rtx if_false_label, if_true_label;
5952{
5953 register enum tree_code code = TREE_CODE (exp);
5954 /* Some cases need to create a label to jump to
5955 in order to properly fall through.
5956 These cases set DROP_THROUGH_LABEL nonzero. */
5957 rtx drop_through_label = 0;
5958 rtx temp;
5959 rtx comparison = 0;
5960 int i;
5961 tree type;
5962
5963 emit_queue ();
5964
5965 switch (code)
5966 {
5967 case ERROR_MARK:
5968 break;
5969
5970 case INTEGER_CST:
5971 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5972 if (temp)
5973 emit_jump (temp);
5974 break;
5975
5976#if 0
5977 /* This is not true with #pragma weak */
5978 case ADDR_EXPR:
5979 /* The address of something can never be zero. */
5980 if (if_true_label)
5981 emit_jump (if_true_label);
5982 break;
5983#endif
5984
5985 case NOP_EXPR:
5986 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5987 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5988 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5989 goto normal;
5990 case CONVERT_EXPR:
5991 /* If we are narrowing the operand, we have to do the compare in the
5992 narrower mode. */
5993 if ((TYPE_PRECISION (TREE_TYPE (exp))
5994 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5995 goto normal;
5996 case NON_LVALUE_EXPR:
5997 case REFERENCE_EXPR:
5998 case ABS_EXPR:
5999 case NEGATE_EXPR:
6000 case LROTATE_EXPR:
6001 case RROTATE_EXPR:
6002 /* These cannot change zero->non-zero or vice versa. */
6003 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6004 break;
6005
6006#if 0
6007 /* This is never less insns than evaluating the PLUS_EXPR followed by
6008 a test and can be longer if the test is eliminated. */
6009 case PLUS_EXPR:
6010 /* Reduce to minus. */
6011 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6012 TREE_OPERAND (exp, 0),
6013 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6014 TREE_OPERAND (exp, 1))));
6015 /* Process as MINUS. */
6016#endif
6017
6018 case MINUS_EXPR:
6019 /* Non-zero iff operands of minus differ. */
6020 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6021 TREE_OPERAND (exp, 0),
6022 TREE_OPERAND (exp, 1)),
6023 NE, NE);
6024 break;
6025
6026 case BIT_AND_EXPR:
6027 /* If we are AND'ing with a small constant, do this comparison in the
6028 smallest type that fits. If the machine doesn't have comparisons
6029 that small, it will be converted back to the wider comparison.
6030 This helps if we are testing the sign bit of a narrower object.
6031 combine can't do this for us because it can't know whether a
6032 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6033
08af8e09
RK
6034 if (! SLOW_BYTE_ACCESS
6035 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6036 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6037 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6038 && (type = type_for_size (i + 1, 1)) != 0
6039 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6040 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6041 != CODE_FOR_nothing))
6042 {
6043 do_jump (convert (type, exp), if_false_label, if_true_label);
6044 break;
6045 }
6046 goto normal;
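      /* Illustrative sketch (disabled; x, wide, narrow hypothetical):
         the narrowing above.  A test of bits that fit in one byte needs
         no full-width compare; e.g. for the sign bit of the low byte:  */
#if 0
      wide = (x & 0x80) != 0;           /* SImode test ... */
      narrow = (signed char) x < 0;     /* ... same answer in QImode */
#endif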
6047
6048 case TRUTH_NOT_EXPR:
6049 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6050 break;
6051
6052 case TRUTH_ANDIF_EXPR:
6053 if (if_false_label == 0)
6054 if_false_label = drop_through_label = gen_label_rtx ();
906c4e36 6055 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
bbf6f052
RK
6056 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6057 break;
6058
6059 case TRUTH_ORIF_EXPR:
6060 if (if_true_label == 0)
6061 if_true_label = drop_through_label = gen_label_rtx ();
906c4e36 6062 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
bbf6f052
RK
6063 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6064 break;
6065
6066 case COMPOUND_EXPR:
6067 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6068 free_temp_slots ();
6069 emit_queue ();
e7c33f54 6070 do_pending_stack_adjust ();
bbf6f052
RK
6071 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6072 break;
6073
6074 case COMPONENT_REF:
6075 case BIT_FIELD_REF:
6076 case ARRAY_REF:
6077 {
6078 int bitsize, bitpos, unsignedp;
6079 enum machine_mode mode;
6080 tree type;
7bb0943f 6081 tree offset;
bbf6f052
RK
6082 int volatilep = 0;
6083
6084 /* Get description of this reference. We don't actually care
6085 about the underlying object here. */
7bb0943f
RS
6086 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6087 &mode, &unsignedp, &volatilep);
bbf6f052
RK
6088
6089 type = type_for_size (bitsize, unsignedp);
08af8e09
RK
6090 if (! SLOW_BYTE_ACCESS
6091 && type != 0 && bitsize >= 0
6092 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6093 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6094 != CODE_FOR_nothing))
6095 {
6096 do_jump (convert (type, exp), if_false_label, if_true_label);
6097 break;
6098 }
6099 goto normal;
6100 }
6101
6102 case COND_EXPR:
6103 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6104 if (integer_onep (TREE_OPERAND (exp, 1))
6105 && integer_zerop (TREE_OPERAND (exp, 2)))
6106 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6107
6108 else if (integer_zerop (TREE_OPERAND (exp, 1))
6109 && integer_onep (TREE_OPERAND (exp, 2)))
6110 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6111
6112 else
6113 {
6114 register rtx label1 = gen_label_rtx ();
6115 drop_through_label = gen_label_rtx ();
906c4e36 6116 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052
RK
6117 /* Now the THEN-expression. */
6118 do_jump (TREE_OPERAND (exp, 1),
6119 if_false_label ? if_false_label : drop_through_label,
6120 if_true_label ? if_true_label : drop_through_label);
e7c33f54
RK
6121 /* In case the do_jump just above never jumps. */
6122 do_pending_stack_adjust ();
bbf6f052
RK
6123 emit_label (label1);
6124 /* Now the ELSE-expression. */
6125 do_jump (TREE_OPERAND (exp, 2),
6126 if_false_label ? if_false_label : drop_through_label,
6127 if_true_label ? if_true_label : drop_through_label);
6128 }
6129 break;
6130
6131 case EQ_EXPR:
6132 if (integer_zerop (TREE_OPERAND (exp, 1)))
6133 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6134 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6135 == MODE_INT)
6136 &&
6137 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6138 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6139 else
6140 comparison = compare (exp, EQ, EQ);
6141 break;
6142
6143 case NE_EXPR:
6144 if (integer_zerop (TREE_OPERAND (exp, 1)))
6145 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6146 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6147 == MODE_INT)
6148 &&
6149 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6150 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6151 else
6152 comparison = compare (exp, NE, NE);
6153 break;
6154
6155 case LT_EXPR:
6156 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6157 == MODE_INT)
6158 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6159 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6160 else
6161 comparison = compare (exp, LT, LTU);
6162 break;
6163
6164 case LE_EXPR:
6165 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6166 == MODE_INT)
6167 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6168 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6169 else
6170 comparison = compare (exp, LE, LEU);
6171 break;
6172
6173 case GT_EXPR:
6174 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6175 == MODE_INT)
6176 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6177 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6178 else
6179 comparison = compare (exp, GT, GTU);
6180 break;
6181
6182 case GE_EXPR:
6183 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6184 == MODE_INT)
6185 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6186 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6187 else
6188 comparison = compare (exp, GE, GEU);
6189 break;
6190
6191 default:
6192 normal:
906c4e36 6193 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6194#if 0
6195 /* This is not needed any more and causes poor code since it causes
6196 comparisons and tests from non-SI objects to have different code
6197 sequences. */
6198 /* Copy to register to avoid generating bad insns by cse
6199 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6200 if (!cse_not_expected && GET_CODE (temp) == MEM)
6201 temp = copy_to_reg (temp);
6202#endif
6203 do_pending_stack_adjust ();
6204 if (GET_CODE (temp) == CONST_INT)
6205 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6206 else if (GET_CODE (temp) == LABEL_REF)
6207 comparison = const_true_rtx;
6208 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6209 && !can_compare_p (GET_MODE (temp)))
6210 /* Note swapping the labels gives us not-equal. */
6211 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6212 else if (GET_MODE (temp) != VOIDmode)
6213 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6214 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6215 GET_MODE (temp), NULL_RTX, 0);
6216 else
6217 abort ();
6218 }
6219
6220 /* Do any postincrements in the expression that was tested. */
6221 emit_queue ();
6222
6223 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6224 straight into a conditional jump instruction as the jump condition.
6225 Otherwise, all the work has been done already. */
6226
6227 if (comparison == const_true_rtx)
6228 {
6229 if (if_true_label)
6230 emit_jump (if_true_label);
6231 }
6232 else if (comparison == const0_rtx)
6233 {
6234 if (if_false_label)
6235 emit_jump (if_false_label);
6236 }
6237 else if (comparison)
6238 do_jump_for_compare (comparison, if_false_label, if_true_label);
6239
6240 free_temp_slots ();
6241
6242 if (drop_through_label)
6243 {
6244 /* If do_jump produces code that might be jumped around,
6245 do any stack adjusts from that code, before the place
6246 where control merges in. */
6247 do_pending_stack_adjust ();
6248 emit_label (drop_through_label);
6249 }
bbf6f052
RK
6250}
6251\f
6252/* Given a comparison expression EXP for values too wide to be compared
6253 with one insn, test the comparison and jump to the appropriate label.
6254 The code of EXP is ignored; we always test GT if SWAP is 0,
6255 and LT if SWAP is 1. */
6256
6257static void
6258do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6259 tree exp;
6260 int swap;
6261 rtx if_false_label, if_true_label;
6262{
906c4e36
RK
6263 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6264 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6265 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6266 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6267 rtx drop_through_label = 0;
6268 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6269 int i;
6270
6271 if (! if_true_label || ! if_false_label)
6272 drop_through_label = gen_label_rtx ();
6273 if (! if_true_label)
6274 if_true_label = drop_through_label;
6275 if (! if_false_label)
6276 if_false_label = drop_through_label;
6277
6278 /* Compare a word at a time, high order first. */
6279 for (i = 0; i < nwords; i++)
6280 {
6281 rtx comp;
6282 rtx op0_word, op1_word;
6283
6284 if (WORDS_BIG_ENDIAN)
6285 {
6286 op0_word = operand_subword_force (op0, i, mode);
6287 op1_word = operand_subword_force (op1, i, mode);
6288 }
6289 else
6290 {
6291 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6292 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6293 }
6294
6295 /* All but high-order word must be compared as unsigned. */
6296 comp = compare_from_rtx (op0_word, op1_word,
6297 (unsignedp || i > 0) ? GTU : GT,
906c4e36 6298 unsignedp, word_mode, NULL_RTX, 0);
bbf6f052
RK
6299 if (comp == const_true_rtx)
6300 emit_jump (if_true_label);
6301 else if (comp != const0_rtx)
906c4e36 6302 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052
RK
6303
6304 /* Consider lower words only if these are equal. */
6305 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
906c4e36 6306 NULL_RTX, 0);
bbf6f052
RK
6307 if (comp == const_true_rtx)
6308 emit_jump (if_false_label);
6309 else if (comp != const0_rtx)
906c4e36 6310 do_jump_for_compare (comp, NULL_RTX, if_false_label);
bbf6f052
RK
6311 }
6312
6313 if (if_false_label)
6314 emit_jump (if_false_label);
6315 if (drop_through_label)
6316 emit_label (drop_through_label);
6317}
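/* Illustrative sketch, not compiler code: the word-at-a-time unsigned
   comparison above, assuming A[NWORDS-1] is the most significant word.  */

static int
multiword_gtu_sketch (a, b, nwords)
     unsigned long *a, *b;
     int nwords;
{
  int i;

  for (i = nwords - 1; i >= 0; i--)
    {
      if (a[i] > b[i])
	return 1;               /* this word decides: greater */
      if (a[i] != b[i])
	return 0;               /* this word decides: less */
      /* Words equal: only then do the lower words matter.  */
    }
  return 0;                     /* all words equal: not greater */
}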
6318
6319/* Given an EQ_EXPR expression EXP for values too wide to be compared
6320 with one insn, test the comparison and jump to the appropriate label. */
6321
6322static void
6323do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6324 tree exp;
6325 rtx if_false_label, if_true_label;
6326{
906c4e36
RK
6327 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6328 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6329 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6330 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6331 int i;
6332 rtx drop_through_label = 0;
6333
6334 if (! if_false_label)
6335 drop_through_label = if_false_label = gen_label_rtx ();
6336
6337 for (i = 0; i < nwords; i++)
6338 {
6339 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6340 operand_subword_force (op1, i, mode),
6341 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6342 word_mode, NULL_RTX, 0);
bbf6f052
RK
6343 if (comp == const_true_rtx)
6344 emit_jump (if_false_label);
6345 else if (comp != const0_rtx)
906c4e36 6346 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
6347 }
6348
6349 if (if_true_label)
6350 emit_jump (if_true_label);
6351 if (drop_through_label)
6352 emit_label (drop_through_label);
6353}
6354\f
6355/* Jump according to whether OP0 is 0.
6356 We assume that OP0 has an integer mode that is too wide
6357 for the available compare insns. */
6358
6359static void
6360do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6361 rtx op0;
6362 rtx if_false_label, if_true_label;
6363{
6364 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6365 int i;
6366 rtx drop_through_label = 0;
6367
6368 if (! if_false_label)
6369 drop_through_label = if_false_label = gen_label_rtx ();
6370
6371 for (i = 0; i < nwords; i++)
6372 {
6373 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6374 GET_MODE (op0)),
cd1b4b44 6375 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
bbf6f052
RK
6376 if (comp == const_true_rtx)
6377 emit_jump (if_false_label);
6378 else if (comp != const0_rtx)
906c4e36 6379 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
6380 }
6381
6382 if (if_true_label)
6383 emit_jump (if_true_label);
6384 if (drop_through_label)
6385 emit_label (drop_through_label);
6386}
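/* Illustrative sketch, not compiler code: the multiword equality test
   used by the two routines above.  For equality, word order does not
   matter.  */

static int
multiword_eq_sketch (a, b, nwords)
     unsigned long *a, *b;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    if (a[i] != b[i])
      return 0;                 /* any differing word decides it */
  return 1;
}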
6387
6388/* Given a comparison expression in rtl form, output conditional branches to
6389 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6390
6391static void
6392do_jump_for_compare (comparison, if_false_label, if_true_label)
6393 rtx comparison, if_false_label, if_true_label;
6394{
6395 if (if_true_label)
6396 {
6397 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6398 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6399 else
6400 abort ();
6401
6402 if (if_false_label)
6403 emit_jump (if_false_label);
6404 }
6405 else if (if_false_label)
6406 {
6407 rtx insn;
6408 rtx prev = PREV_INSN (get_last_insn ());
6409 rtx branch = 0;
6410
6411 /* Output the branch with the opposite condition. Then try to invert
6412 what is generated. If more than one insn is a branch, or if the
6413 branch is not the last insn written, abort. If we can't invert
 6414 the branch, make a true label, redirect this jump to that,
6415 emit a jump to the false label and define the true label. */
6416
6417 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6418 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6419 else
6420 abort ();
6421
6422 /* Here we get the insn before what was just emitted.
6423 On some machines, emitting the branch can discard
6424 the previous compare insn and emit a replacement. */
6425 if (prev == 0)
6426 /* If there's only one preceding insn... */
6427 insn = get_insns ();
6428 else
6429 insn = NEXT_INSN (prev);
6430
6431 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6432 if (GET_CODE (insn) == JUMP_INSN)
6433 {
6434 if (branch)
6435 abort ();
6436 branch = insn;
6437 }
6438
6439 if (branch != get_last_insn ())
6440 abort ();
6441
6442 if (! invert_jump (branch, if_false_label))
6443 {
6444 if_true_label = gen_label_rtx ();
6445 redirect_jump (branch, if_true_label);
6446 emit_jump (if_false_label);
6447 emit_label (if_true_label);
6448 }
6449 }
6450}
6451\f
6452/* Generate code for a comparison expression EXP
6453 (including code to compute the values to be compared)
6454 and set (CC0) according to the result.
6455 SIGNED_CODE should be the rtx operation for this comparison for
6456 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6457
6458 We force a stack adjustment unless there are currently
6459 things pushed on the stack that aren't yet used. */
6460
6461static rtx
6462compare (exp, signed_code, unsigned_code)
6463 register tree exp;
6464 enum rtx_code signed_code, unsigned_code;
6465{
906c4e36
RK
6466 register rtx op0
6467 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6468 register rtx op1
6469 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6470 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6471 register enum machine_mode mode = TYPE_MODE (type);
6472 int unsignedp = TREE_UNSIGNED (type);
6473 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6474
6475 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6476 ((mode == BLKmode)
906c4e36 6477 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6478 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6479}
6480
6481/* Like compare but expects the values to compare as two rtx's.
6482 The decision as to signed or unsigned comparison must be made by the caller.
6483
6484 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6485 compared.
6486
6487 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6488 size of MODE should be used. */
6489
6490rtx
6491compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6492 register rtx op0, op1;
6493 enum rtx_code code;
6494 int unsignedp;
6495 enum machine_mode mode;
6496 rtx size;
6497 int align;
6498{
6499 /* If one operand is constant, make it the second one. */
6500
6501 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6502 {
6503 rtx tem = op0;
6504 op0 = op1;
6505 op1 = tem;
6506 code = swap_condition (code);
6507 }
6508
6509 if (flag_force_mem)
6510 {
6511 op0 = force_not_mem (op0);
6512 op1 = force_not_mem (op1);
6513 }
6514
6515 do_pending_stack_adjust ();
6516
6517 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6518 return simplify_relational_operation (code, mode, op0, op1);
6519
cd1b4b44
RK
6520#if 0
6521 /* There's no need to do this now that combine.c can eliminate lots of
6522 sign extensions. This can be less efficient in certain cases on other
 6523 machines. */
6524
bbf6f052
RK
6525 /* If this is a signed equality comparison, we can do it as an
6526 unsigned comparison since zero-extension is cheaper than sign
6527 extension and comparisons with zero are done as unsigned. This is
6528 the case even on machines that can do fast sign extension, since
 6529 zero-extension is easier to combine with other operations than
6530 sign-extension is. If we are comparing against a constant, we must
6531 convert it to what it would look like unsigned. */
bbf6f052 6532 if ((code == EQ || code == NE) && ! unsignedp
906c4e36 6533 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6534 {
6535 if (GET_CODE (op1) == CONST_INT
6536 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
906c4e36 6537 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
bbf6f052
RK
6538 unsignedp = 1;
6539 }
cd1b4b44 6540#endif
bbf6f052
RK
6541
6542 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6543
6544 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6545}
6546\f
6547/* Generate code to calculate EXP using a store-flag instruction
6548 and return an rtx for the result. EXP is either a comparison
6549 or a TRUTH_NOT_EXPR whose operand is a comparison.
6550
6551 If TARGET is nonzero, store the result there if convenient.
6552
6553 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6554 cheap.
6555
6556 Return zero if there is no suitable set-flag instruction
6557 available on this machine.
6558
6559 Once expand_expr has been called on the arguments of the comparison,
6560 we are committed to doing the store flag, since it is not safe to
6561 re-evaluate the expression. We emit the store-flag insn by calling
6562 emit_store_flag, but only expand the arguments if we have a reason
6563 to believe that emit_store_flag will be successful. If we think that
6564 it will, but it isn't, we have to simulate the store-flag with a
6565 set/jump/set sequence. */
6566
6567static rtx
6568do_store_flag (exp, target, mode, only_cheap)
6569 tree exp;
6570 rtx target;
6571 enum machine_mode mode;
6572 int only_cheap;
6573{
6574 enum rtx_code code;
e7c33f54 6575 tree arg0, arg1, type;
bbf6f052 6576 tree tem;
e7c33f54
RK
6577 enum machine_mode operand_mode;
6578 int invert = 0;
6579 int unsignedp;
bbf6f052
RK
6580 rtx op0, op1;
6581 enum insn_code icode;
6582 rtx subtarget = target;
6583 rtx result, label, pattern, jump_pat;
6584
e7c33f54
RK
6585 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6586 result at the end. We can't simply invert the test since it would
6587 have already been inverted if it were valid. This case occurs for
6588 some floating-point comparisons. */
6589
6590 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6591 invert = 1, exp = TREE_OPERAND (exp, 0);
6592
6593 arg0 = TREE_OPERAND (exp, 0);
6594 arg1 = TREE_OPERAND (exp, 1);
6595 type = TREE_TYPE (arg0);
6596 operand_mode = TYPE_MODE (type);
6597 unsignedp = TREE_UNSIGNED (type);
6598
bbf6f052
RK
6599 /* We won't bother with BLKmode store-flag operations because it would mean
6600 passing a lot of information to emit_store_flag. */
6601 if (operand_mode == BLKmode)
6602 return 0;
6603
d964285c
CH
6604 STRIP_NOPS (arg0);
6605 STRIP_NOPS (arg1);
bbf6f052
RK
6606
6607 /* Get the rtx comparison code to use. We know that EXP is a comparison
6608 operation of some type. Some comparisons against 1 and -1 can be
6609 converted to comparisons with zero. Do so here so that the tests
6610 below will be aware that we have a comparison with zero. These
6611 tests will not catch constants in the first operand, but constants
6612 are rarely passed as the first operand. */
6613
6614 switch (TREE_CODE (exp))
6615 {
6616 case EQ_EXPR:
6617 code = EQ;
6618 break;
6619 case NE_EXPR:
6620 code = NE;
6621 break;
6622 case LT_EXPR:
6623 if (integer_onep (arg1))
6624 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6625 else
6626 code = unsignedp ? LTU : LT;
6627 break;
6628 case LE_EXPR:
6629 if (integer_all_onesp (arg1))
6630 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6631 else
6632 code = unsignedp ? LEU : LE;
6633 break;
6634 case GT_EXPR:
6635 if (integer_all_onesp (arg1))
6636 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6637 else
6638 code = unsignedp ? GTU : GT;
6639 break;
6640 case GE_EXPR:
6641 if (integer_onep (arg1))
6642 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6643 else
6644 code = unsignedp ? GEU : GE;
6645 break;
6646 default:
6647 abort ();
6648 }
6649
6650 /* Put a constant second. */
6651 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6652 {
6653 tem = arg0; arg0 = arg1; arg1 = tem;
6654 code = swap_condition (code);
6655 }
6656
6657 /* If this is an equality or inequality test of a single bit, we can
6658 do this by shifting the bit being tested to the low-order bit and
6659 masking the result with the constant 1. If the condition was EQ,
6660 we xor it with 1. This does not require an scc insn and is faster
6661 than an scc insn even if we have it. */
6662
6663 if ((code == NE || code == EQ)
6664 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6665 && integer_pow2p (TREE_OPERAND (arg0, 1))
906c4e36 6666 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6667 {
6668 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
906c4e36 6669 NULL_RTX, VOIDmode, 0)));
bbf6f052
RK
6670
6671 if (subtarget == 0 || GET_CODE (subtarget) != REG
6672 || GET_MODE (subtarget) != operand_mode
6673 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6674 subtarget = 0;
6675
6676 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6677
6678 if (bitnum != 0)
6679 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6680 size_int (bitnum), target, 1);
6681
6682 if (GET_MODE (op0) != mode)
6683 op0 = convert_to_mode (mode, op0, 1);
6684
6685 if (bitnum != TYPE_PRECISION (type) - 1)
6686 op0 = expand_and (op0, const1_rtx, target);
6687
e7c33f54 6688 if ((code == EQ && ! invert) || (code == NE && invert))
6689 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6690 OPTAB_LIB_WIDEN);
6691
6692 return op0;
6693 }
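  /* Illustrative sketch (disabled; flag, x, n hypothetical): the
     single-bit case above in C terms, for x & (1 << n) tested against
     zero.  */
#if 0
  flag = (x >> n) & 1;                  /* the NE result */
  flag = ((x >> n) & 1) ^ 1;            /* the EQ result: xor with 1 */
#endif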
6694
6695 /* Now see if we are likely to be able to do this. Return if not. */
6696 if (! can_compare_p (operand_mode))
6697 return 0;
6698 icode = setcc_gen_code[(int) code];
6699 if (icode == CODE_FOR_nothing
6700 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6701 {
6702 /* We can only do this if it is one of the special cases that
6703 can be handled without an scc insn. */
6704 if ((code == LT && integer_zerop (arg1))
6705 || (! only_cheap && code == GE && integer_zerop (arg1)))
6706 ;
6707 else if (BRANCH_COST >= 0
6708 && ! only_cheap && (code == NE || code == EQ)
6709 && TREE_CODE (type) != REAL_TYPE
6710 && ((abs_optab->handlers[(int) operand_mode].insn_code
6711 != CODE_FOR_nothing)
6712 || (ffs_optab->handlers[(int) operand_mode].insn_code
6713 != CODE_FOR_nothing)))
6714 ;
6715 else
6716 return 0;
6717 }
6718
6719 preexpand_calls (exp);
6720 if (subtarget == 0 || GET_CODE (subtarget) != REG
6721 || GET_MODE (subtarget) != operand_mode
6722 || ! safe_from_p (subtarget, arg1))
6723 subtarget = 0;
6724
6725 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6726 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6727
6728 if (target == 0)
6729 target = gen_reg_rtx (mode);
6730
6731 result = emit_store_flag (target, code, op0, op1, operand_mode,
6732 unsignedp, 1);
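  /* emit_store_flag returns 0 when it cannot emit a usable scc
     sequence; in that case we fall through to the branch-based
     code below.  */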
6733
6734 if (result)
6735 {
6736 if (invert)
6737 result = expand_binop (mode, xor_optab, result, const1_rtx,
6738 result, 0, OPTAB_LIB_WIDEN);
6739 return result;
6740 }
6741
6742 /* If this failed, we have to do this with set/compare/jump/set code. */
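  /* The emitted sequence is, schematically:

         target = 1;                     (0 if INVERT)
         if (op0 <code> op1) goto label;
         target = 0;                     (1 if INVERT)
       label:

     building the flag value with a conditional branch instead of an
     scc instruction.  */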
6743 if (target == 0 || GET_CODE (target) != REG
6744 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6745 target = gen_reg_rtx (GET_MODE (target));
6746
6747 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6748 result = compare_from_rtx (op0, op1, code, unsignedp,
6749 operand_mode, NULL_RTX, 0);
6750 if (GET_CODE (result) == CONST_INT)
6751 return (((result == const0_rtx && ! invert)
6752 || (result != const0_rtx && invert))
6753 ? const0_rtx : const1_rtx);
6754
6755 label = gen_label_rtx ();
6756 if (bcc_gen_fctn[(int) code] == 0)
6757 abort ();
6758
6759 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
6760 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6761 emit_label (label);
6762
6763 return target;
6764}
6765\f
6766/* Generate a tablejump instruction (used for switch statements). */
6767
6768#ifdef HAVE_tablejump
6769
6770/* INDEX is the value being switched on, with the lowest value
6771 in the table already subtracted.
6772 MODE is its expected mode (needed if INDEX is constant).
6773 RANGE is the length of the jump table.
6774 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6775
6776 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6777 index value is out of range. */
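/* For example (a hypothetical caller, not from this file): for
   switch (x) { case 3: ... case 10: ... }
   the caller emits index = x - 3 and calls do_tablejump with
   RANGE = 7; any x outside 3..10 then appears as an unsigned index
   greater than 7 and is routed to DEFAULT_LABEL.  */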
6778
6779void
6780 do_tablejump (index, mode, range, table_label, default_label)
6781 rtx index, range, table_label, default_label;
6782 enum machine_mode mode;
6783{
6784 register rtx temp, vector;
6785
6786 /* Do an unsigned comparison (in the proper mode) between the index
6787 expression and the value that represents the length of the range.
6788 Because the lower bound of the range has just been subtracted from
6789 the index expression, this single comparison checks both that the
6790 original index value is greater than or equal to the minimum value
6791 of the range and that it is less than or equal to the maximum value
6792 of the range. */
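  /* In C terms the test below is (unsigned) index > (unsigned) range:
     an original index below the lower bound wraps around to a very
     large unsigned value after the subtraction, so one comparison
     catches both out-of-range directions.  */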
6793
6794 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
6795 emit_jump_insn (gen_bltu (default_label));
6796
6797 /* If index is in range, it must fit in Pmode.
6798 Convert to Pmode so we can index with it. */
6799 if (mode != Pmode)
6800 index = convert_to_mode (Pmode, index, 1);
6801
6802 /* If flag_force_addr were to affect this address
6803 it could interfere with the tricky assumptions made
6804 about addresses that contain label-refs,
6805 which may be valid only very near the tablejump itself. */
6806 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6807 GET_MODE_SIZE, because this indicates how large insns are. The other
6808 uses should all be Pmode, because they are addresses. This code
6809 could fail if addresses and insns are not the same size. */
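  /* The address computed below is, schematically,

         table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

     i.e. the Pmode address of the INDEX'th table entry, from which
     the target label is loaded and then jumped through.  */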
6810 index = memory_address_noforce
6811 (CASE_VECTOR_MODE,
6812 gen_rtx (PLUS, Pmode,
6813 gen_rtx (MULT, Pmode, index,
6814 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
6815 gen_rtx (LABEL_REF, Pmode, table_label)));
6816 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6817 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6818 RTX_UNCHANGING_P (vector) = 1;
6819 convert_move (temp, vector, 0);
6820
6821 emit_jump_insn (gen_tablejump (temp, table_label));
6822
6823#ifndef CASE_VECTOR_PC_RELATIVE
6824 /* If we are generating PIC code or if the table is PC-relative, the
6825 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6826 if (! flag_pic)
6827 emit_barrier ();
6828#endif
6829}
6830
6831#endif /* HAVE_tablejump */