/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

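/* Illustrative sketch, not part of the original file: CEIL rounds an
   integer division upward, which is how byte counts become word counts
   below.  Assuming 4-byte words, a 5-byte value still needs two words.  */
#if 0
static int
example_words_for_bytes (nbytes)
     int nbytes;
{
  int units_per_word = 4;		/* assumed word size */
  return CEIL (nbytes, units_per_word);	/* CEIL (5, 4) == 2 */
}
#endif
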
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* Find a register that can be used in this mode, if any.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (HARD_REGNO_MODE_OK (regno, mode))
          break;

      if (regno == FIRST_PSEUDO_REGISTER)
        continue;

      reg = gen_rtx (REG, mode, regno);

      SET_SRC (pat) = mem;
      SET_DEST (pat) = reg;
      direct_load[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;

      SET_SRC (pat) = reg;
      SET_DEST (pat) = mem;
      direct_store[(int) mode] = (recog (pat, insn, &num_clobbers)) >= 0;

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
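
/* Illustrative sketch, not part of the original file: how the queue
   above is typically driven for a post-increment.  VAR is a
   hypothetical pseudo; the increment insn is queued, the pre-increment
   value is read through the QUEUED rtx, and emit_queue finally emits
   the increment.  */
#if 0
static void
example_queue_usage ()
{
  rtx var = gen_reg_rtx (SImode);
  rtx body = gen_move_insn (var, plus_constant (var, 1));
  rtx queued = enqueue_insn (var, body);
  rtx pre_value = protect_from_queue (queued, 0);
  /* ... use PRE_VALUE in an insn ... */
  emit_queue ();
}
#endif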
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */	/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      && direct_load[(int) mode]
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
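
/* Illustrative sketch, not part of the original file: a typical use of
   the conversion entry points above.  Widening a QImode pseudo to
   SImode either reuses it via gen_lowpart or emits a convert_move into
   a fresh pseudo; UNSIGNEDP == 1 selects zero-extension.  */
#if 0
static rtx
example_widen_byte (byte_reg)
     rtx byte_reg;			/* hypothetical QImode pseudo */
{
  return convert_to_mode (SImode, byte_reg, 1);
}
#endif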
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
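
/* Illustrative sketch, not part of the original file: the greedy
   accounting move_by_pieces_ninsns performs.  Assuming modes of 8, 4,
   2 and 1 bytes are all usable at the given alignment, an 11-byte copy
   costs one 8-byte, one 2-byte and one 1-byte move, i.e. 3 insns.  */
#if 0
static int
example_ninsns (l)
     unsigned int l;
{
  static int mode_sizes[] = { 8, 4, 2, 1 };	/* assumed mode sizes */
  int n_insns = 0, i;

  for (i = 0; i < 4; i++)
    n_insns += l / mode_sizes[i], l %= mode_sizes[i];
  return n_insns;			/* example_ninsns (11) == 3 */
}
#endif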

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2 = size;
              rtx last = get_last_insn ();
              rtx pat;

              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
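
/* Illustrative sketch, not part of the original file: how a caller
   drives emit_block_move.  A 64-byte copy with known 4-byte alignment
   becomes open-coded moves (if under MOVE_RATIO insns), a movstr-style
   insn, or the library call above.  The MEMs here are hypothetical.  */
#if 0
static void
example_block_copy ()
{
  rtx dest = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
  rtx src = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));

  emit_block_move (dest, src, GEN_INT (64), 4);
}
#endif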
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 1,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      return last_insn;
    }
  else
    abort ();
}
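
/* Illustrative sketch, not part of the original file: on a 32-bit
   target with no DImode move pattern, emit_move_insn above falls back
   to one SImode move per word via operand_subword.  */
#if 0
static void
example_multiword_move ()
{
  rtx dst = gen_reg_rtx (DImode);
  rtx src = gen_reg_rtx (DImode);

  /* Splits into word moves if mov_optab has no DImode handler.  */
  emit_move_insn (dst, src);
}
#endif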
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 1,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 1,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
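
/* Illustrative sketch, not part of the original file: pushing a simple
   SImode scalar argument entirely on the stack.  With no preallocated
   argument block (ARGS_ADDR == 0) and no register part (PARTIAL == 0),
   this takes the final "else" arm above and emits a single push or
   indexed store.  VAL is hypothetical.  */
#if 0
static void
example_push_scalar ()
{
  rtx val = gen_reg_rtx (SImode);

  emit_push_insn (val, SImode, NULL_TREE, NULL_RTX,
                  GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx);
}
#endif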
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */

void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1875 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1876#endif
1877
1878 /* There's no need to call protect_from_queue, because
1879 either emit_move_insn or emit_push_insn will do that. */
1880
1881 /* Make sure it is a reasonable operand for a move or push insn. */
1882 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1883 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1884 val = force_operand (val, NULL_RTX);
1885
1886 argvec[count].value = val;
1887 argvec[count].mode = mode;
1888
1889#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1890 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1891 abort ();
1892#endif
1893
1894 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1895 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1896 abort ();
1897#ifdef FUNCTION_ARG_PARTIAL_NREGS
1898 argvec[count].partial
1899 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1900#else
1901 argvec[count].partial = 0;
1902#endif
1903
1904 locate_and_pad_parm (mode, NULL_TREE,
1905 argvec[count].reg && argvec[count].partial == 0,
1906 NULL_TREE, &args_size, &argvec[count].offset,
1907 &argvec[count].size);
1908
1909 if (argvec[count].size.var)
1910 abort ();
1911
1912#ifndef REG_PARM_STACK_SPACE
1913 if (argvec[count].partial)
1914 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1915#endif
1916
1917 if (argvec[count].reg == 0 || argvec[count].partial != 0
1918#ifdef REG_PARM_STACK_SPACE
1919 || 1
1920#endif
1921 )
1922 args_size.constant += argvec[count].size.constant;
1923
1924#ifdef ACCUMULATE_OUTGOING_ARGS
1925 /* If this arg is actually passed on the stack, it might be
1926 clobbering something we already put there (this library call might
1927 be inside the evaluation of an argument to a function whose call
1928 requires the stack). This will only occur when the library call
1929 has sufficient args to run out of argument registers. Abort in
1930 this case; if this ever occurs, code must be added to save and
1931 restore the arg slot. */
1932
1933 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1934 abort ();
1935#endif
1936
1937 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1938 }
1939 va_end (p);
1940
1941 /* If this machine requires an external definition for library
1942 functions, write one out. */
1943 assemble_external_libcall (fun);
1944
1945#ifdef STACK_BOUNDARY
1946 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1947 / STACK_BYTES) * STACK_BYTES);
1948#endif
1949
1950#ifdef REG_PARM_STACK_SPACE
1951 args_size.constant = MAX (args_size.constant,
1952 REG_PARM_STACK_SPACE ((tree) 0));
1953#endif
1954
1955#ifdef ACCUMULATE_OUTGOING_ARGS
1956 if (args_size.constant > current_function_outgoing_args_size)
1957 current_function_outgoing_args_size = args_size.constant;
1958 args_size.constant = 0;
1959#endif
1960
1961#ifndef PUSH_ROUNDING
1962 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
1963#endif
1964
1965#ifdef PUSH_ARGS_REVERSED
1966 inc = -1;
1967 argnum = nargs - 1;
1968#else
1969 inc = 1;
1970 argnum = 0;
1971#endif
1972
1973 /* Push the args that need to be pushed. */
1974
1975 for (count = 0; count < nargs; count++, argnum += inc)
1976 {
1977 register enum machine_mode mode = argvec[argnum].mode;
1978 register rtx val = argvec[argnum].value;
1979 rtx reg = argvec[argnum].reg;
1980 int partial = argvec[argnum].partial;
1981
1982 if (! (reg != 0 && partial == 0))
1983 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
1984 argblock, GEN_INT (argvec[argnum].offset.constant));
1985 NO_DEFER_POP;
1986 }
1987
1988#ifdef PUSH_ARGS_REVERSED
1989 argnum = nargs - 1;
1990#else
1991 argnum = 0;
1992#endif
1993
1994 /* Now load any reg parms into their regs. */
1995
1996 for (count = 0; count < nargs; count++, argnum += inc)
1997 {
1998 register enum machine_mode mode = argvec[argnum].mode;
1999 register rtx val = argvec[argnum].value;
2000 rtx reg = argvec[argnum].reg;
2001 int partial = argvec[argnum].partial;
2002
2003 if (reg != 0 && partial == 0)
2004 emit_move_insn (reg, val);
2005 NO_DEFER_POP;
2006 }
2007
2008 /* For version 1.37, try deleting this entirely. */
2009 if (! no_queue)
2010 emit_queue ();
2011
2012 /* Any regs containing parms remain in use through the call. */
2013 start_sequence ();
2014 for (count = 0; count < nargs; count++)
2015 if (argvec[count].reg != 0)
2016 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2017
2018 use_insns = get_insns ();
2019 end_sequence ();
2020
2021 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2022
2023 /* Don't allow popping to be deferred, since then
2024 cse'ing of library calls could delete a call and leave the pop. */
2025 NO_DEFER_POP;
2026
2027 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2028 will set inhibit_defer_pop to that value. */
2029
2030 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2031 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2032 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2033 old_inhibit_defer_pop + 1, use_insns, no_queue);
2034
2035 /* Now restore inhibit_defer_pop to its actual original value. */
2036 OK_DEFER_POP;
2037}
2038\f
2039/* Expand an assignment that stores the value of FROM into TO.
2040 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2041 (This may contain a QUEUED rtx.)
2042 Otherwise, the returned value is not meaningful.
2043
2044 SUGGEST_REG is no longer actually used.
2045 It used to mean, copy the value through a register
2046 and return that register, if that is possible.
2047 But now we do this if WANT_VALUE.
2048
2049 If the value stored is a constant, we return the constant. */
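/* Illustrative sketch: for a C assignment such as

     struct { int f : 3; } s;
     s.f = x;

   TO is a COMPONENT_REF, so the code below calls get_inner_reference to
   find the bit position and width of F and then stores through
   store_field rather than through a plain move.  */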
2050
2051rtx
2052expand_assignment (to, from, want_value, suggest_reg)
2053 tree to, from;
2054 int want_value;
2055 int suggest_reg;
2056{
2057 register rtx to_rtx = 0;
2058 rtx result;
2059
2060 /* Don't crash if the lhs of the assignment was erroneous. */
2061
2062 if (TREE_CODE (to) == ERROR_MARK)
2063 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2064
2065 /* Assignment of a structure component needs special treatment
2066 if the structure component's rtx is not simply a MEM.
2067 Assignment of an array element at a constant index
2068 has the same problem. */
2069
2070 if (TREE_CODE (to) == COMPONENT_REF
2071 || TREE_CODE (to) == BIT_FIELD_REF
2072 || (TREE_CODE (to) == ARRAY_REF
2073 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2074 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2075 {
2076 enum machine_mode mode1;
2077 int bitsize;
2078 int bitpos;
2079 tree offset;
2080 int unsignedp;
2081 int volatilep = 0;
2082 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2083 &mode1, &unsignedp, &volatilep);
2084
2085 /* If we are going to use store_bit_field and extract_bit_field,
2086 make sure to_rtx will be safe for multiple use. */
2087
2088 if (mode1 == VOIDmode && want_value)
2089 tem = stabilize_reference (tem);
2090
2091 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2092 if (offset != 0)
2093 {
2094 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2095
2096 if (GET_CODE (to_rtx) != MEM)
2097 abort ();
2098 to_rtx = change_address (to_rtx, VOIDmode,
2099 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2100 force_reg (Pmode, offset_rtx)));
2101 }
2102 if (volatilep)
2103 {
2104 if (GET_CODE (to_rtx) == MEM)
2105 MEM_VOLATILE_P (to_rtx) = 1;
2106#if 0 /* This was turned off because, when a field is volatile
2107 in an object which is not volatile, the object may be in a register,
2108 and then we would abort over here. */
2109 else
2110 abort ();
2111#endif
2112 }
2113
2114 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2115 (want_value
2116 /* Spurious cast makes HPUX compiler happy. */
2117 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2118 : VOIDmode),
2119 unsignedp,
2120 /* Required alignment of containing datum. */
2121 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2122 int_size_in_bytes (TREE_TYPE (tem)));
2123 preserve_temp_slots (result);
2124 free_temp_slots ();
2125
2126 return result;
2127 }
2128
2129 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2130 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2131
2132 if (to_rtx == 0)
2133 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2134
2135 /* In case we are returning the contents of an object which overlaps
2136 the place the value is being stored, use a safe function when copying
2137 a value through a pointer into a structure value return block. */
2138 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2139 && current_function_returns_struct
2140 && !current_function_returns_pcc_struct)
2141 {
2142 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2143 rtx size = expr_size (from);
2144
2145#ifdef TARGET_MEM_FUNCTIONS
2146 emit_library_call (memcpy_libfunc, 1,
2147 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2148 XEXP (from_rtx, 0), Pmode,
2149 size, Pmode);
2150#else
2151 emit_library_call (bcopy_libfunc, 1,
2152 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2153 XEXP (to_rtx, 0), Pmode,
2154 size, Pmode);
2155#endif
2156
2157 preserve_temp_slots (to_rtx);
2158 free_temp_slots ();
2159 return to_rtx;
2160 }
2161
2162 /* Compute FROM and store the value in the rtx we got. */
2163
2164 result = store_expr (from, to_rtx, want_value);
2165 preserve_temp_slots (result);
2166 free_temp_slots ();
2167 return result;
2168}
2169
2170/* Generate code for computing expression EXP,
2171 and storing the value into TARGET.
2172 Returns TARGET or an equivalent value.
2173 TARGET may contain a QUEUED rtx.
2174
2175 If SUGGEST_REG is nonzero, copy the value through a register
2176 and return that register, if that is possible.
2177
2178 If the value stored is a constant, we return the constant. */
2179
2180rtx
2181store_expr (exp, target, suggest_reg)
2182 register tree exp;
2183 register rtx target;
2184 int suggest_reg;
2185{
2186 register rtx temp;
2187 int dont_return_target = 0;
2188
2189 if (TREE_CODE (exp) == COMPOUND_EXPR)
2190 {
2191 /* Perform first part of compound expression, then assign from second
2192 part. */
2193 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2194 emit_queue ();
2195 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2196 }
2197 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2198 {
2199 /* For conditional expression, get safe form of the target. Then
2200 test the condition, doing the appropriate assignment on either
2201 side. This avoids the creation of unnecessary temporaries.
2202 For non-BLKmode, it is more efficient not to do this. */
2203
2204 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2205
2206 emit_queue ();
2207 target = protect_from_queue (target, 1);
2208
2209 NO_DEFER_POP;
2210 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2211 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2212 emit_queue ();
2213 emit_jump_insn (gen_jump (lab2));
2214 emit_barrier ();
2215 emit_label (lab1);
2216 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2217 emit_queue ();
2218 emit_label (lab2);
2219 OK_DEFER_POP;
2220 return target;
2221 }
2222 else if (suggest_reg && GET_CODE (target) == MEM
2223 && GET_MODE (target) != BLKmode)
2224 /* If target is in memory and caller wants value in a register instead,
2225 arrange that. Pass TARGET as target for expand_expr so that,
2226 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2227 We know expand_expr will not use the target in that case. */
2228 {
2229 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2230 GET_MODE (target), 0);
2231 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2232 temp = copy_to_reg (temp);
2233 dont_return_target = 1;
2234 }
2235 else if (queued_subexp_p (target))
2236 /* If target contains a postincrement, it is not safe
2237 to use as the returned value. It would access the wrong
2238 place by the time the queued increment gets output.
2239 So copy the value through a temporary and use that temp
2240 as the result. */
2241 {
2242 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2243 {
2244 /* Expand EXP into a new pseudo. */
2245 temp = gen_reg_rtx (GET_MODE (target));
2246 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2247 }
2248 else
2249 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2250 dont_return_target = 1;
2251 }
2252 else
2253 {
2254 temp = expand_expr (exp, target, GET_MODE (target), 0);
2255 /* DO return TARGET if it's a specified hardware register.
2256 expand_return relies on this. */
2257 if (!(target && GET_CODE (target) == REG
2258 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2259 && CONSTANT_P (temp))
2260 dont_return_target = 1;
2261 }
2262
2263 /* If value was not generated in the target, store it there.
2264 Convert the value to TARGET's type first if necessary. */
2265
2266 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2267 {
2268 target = protect_from_queue (target, 1);
2269 if (GET_MODE (temp) != GET_MODE (target)
2270 && GET_MODE (temp) != VOIDmode)
2271 {
2272 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2273 if (dont_return_target)
2274 {
2275 /* In this case, we will return TEMP,
2276 so make sure it has the proper mode.
2277 But don't forget to store the value into TARGET. */
2278 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2279 emit_move_insn (target, temp);
2280 }
2281 else
2282 convert_move (target, temp, unsignedp);
2283 }
2284
2285 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2286 {
2287 /* Handle copying a string constant into an array.
2288 The string constant may be shorter than the array.
2289 So copy just the string's actual length, and clear the rest. */
2290 rtx size;
2291
2292 /* Get the size of the data type of the string,
2293 which is actually the size of the target. */
2294 size = expr_size (exp);
2295 if (GET_CODE (size) == CONST_INT
2296 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2297 emit_block_move (target, temp, size,
2298 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2299 else
2300 {
2301 /* Compute the size of the data to copy from the string. */
2302 tree copy_size
2303 = fold (build (MIN_EXPR, sizetype,
2304 size_binop (CEIL_DIV_EXPR,
2305 TYPE_SIZE (TREE_TYPE (exp)),
2306 size_int (BITS_PER_UNIT)),
2307 convert (sizetype,
2308 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2309 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2310 VOIDmode, 0);
2311 rtx label = 0;
2312
2313 /* Copy that much. */
2314 emit_block_move (target, temp, copy_size_rtx,
2315 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2316
2317 /* Figure out how much is left in TARGET
2318 that we have to clear. */
2319 if (GET_CODE (copy_size_rtx) == CONST_INT)
2320 {
2321 temp = plus_constant (XEXP (target, 0),
2322 TREE_STRING_LENGTH (exp));
2323 size = plus_constant (size,
2324 - TREE_STRING_LENGTH (exp));
2325 }
2326 else
2327 {
2328 enum machine_mode size_mode = Pmode;
2329
2330 temp = force_reg (Pmode, XEXP (target, 0));
2331 temp = expand_binop (size_mode, add_optab, temp,
2332 copy_size_rtx, NULL_RTX, 0,
2333 OPTAB_LIB_WIDEN);
2334
2335 size = expand_binop (size_mode, sub_optab, size,
2336 copy_size_rtx, NULL_RTX, 0,
2337 OPTAB_LIB_WIDEN);
2338
2339 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2340 GET_MODE (size), 0, 0);
2341 label = gen_label_rtx ();
2342 emit_jump_insn (gen_blt (label));
2343 }
2344
2345 if (size != const0_rtx)
2346 {
2347#ifdef TARGET_MEM_FUNCTIONS
2348 emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2349 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2350#else
2351 emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2352 temp, Pmode, size, Pmode);
2353#endif
2354 }
2355 if (label)
2356 emit_label (label);
2357 }
2358 }
2359 else if (GET_MODE (temp) == BLKmode)
2360 emit_block_move (target, temp, expr_size (exp),
2361 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2362 else
2363 emit_move_insn (target, temp);
2364 }
2365 if (dont_return_target)
2366 return temp;
2367 return target;
2368}
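/* Illustrative sketch of the STRING_CST case above: for

     char buf[8] = "hi";

   the block move copies the string's 3 bytes (including the terminating
   null) into BUF and the library call clears the remaining 5 bytes.  */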
2369\f
2370/* Store the value of constructor EXP into the rtx TARGET.
2371 TARGET is either a REG or a MEM. */
2372
2373static void
2374store_constructor (exp, target)
2375 tree exp;
2376 rtx target;
2377{
2378 tree type = TREE_TYPE (exp);
2379
2380 /* We know our target cannot conflict, since safe_from_p has been called. */
2381#if 0
2382 /* Don't try copying piece by piece into a hard register
2383 since that is vulnerable to being clobbered by EXP.
2384 Instead, construct in a pseudo register and then copy it all. */
2385 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2386 {
2387 rtx temp = gen_reg_rtx (GET_MODE (target));
2388 store_constructor (exp, temp);
2389 emit_move_insn (target, temp);
2390 return;
2391 }
2392#endif
2393
2394 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2395 {
2396 register tree elt;
2397
2398 /* Inform later passes that the whole union value is dead. */
2399 if (TREE_CODE (type) == UNION_TYPE)
2400 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2401
2402 /* If we are building a static constructor into a register,
2403 set the initial value as zero so we can fold the value into
2404 a constant. */
2405 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2406 emit_move_insn (target, const0_rtx);
2407
2408 /* If the constructor has fewer fields than the structure,
2409 clear the whole structure first. */
2410 else if (list_length (CONSTRUCTOR_ELTS (exp))
2411 != list_length (TYPE_FIELDS (type)))
2412 clear_storage (target, int_size_in_bytes (type));
2413 else
2414 /* Inform later passes that the old value is dead. */
2415 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2416
2417 /* Store each element of the constructor into
2418 the corresponding field of TARGET. */
2419
2420 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2421 {
2422 register tree field = TREE_PURPOSE (elt);
2423 register enum machine_mode mode;
2424 int bitsize;
2425 int bitpos;
2426 int unsignedp;
2427
2428 /* Just ignore missing fields.
2429 We cleared the whole structure, above,
2430 if any fields are missing. */
2431 if (field == 0)
2432 continue;
2433
2434 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2435 unsignedp = TREE_UNSIGNED (field);
2436 mode = DECL_MODE (field);
2437 if (DECL_BIT_FIELD (field))
2438 mode = VOIDmode;
2439
2440 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2441 /* ??? This case remains to be written. */
2442 abort ();
2443
2444 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2445
2446 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2447 /* The alignment of TARGET is
2448 at least what its type requires. */
2449 VOIDmode, 0,
2450 TYPE_ALIGN (type) / BITS_PER_UNIT,
2451 int_size_in_bytes (type));
2452 }
2453 }
2454 else if (TREE_CODE (type) == ARRAY_TYPE)
2455 {
2456 register tree elt;
2457 register int i;
2458 tree domain = TYPE_DOMAIN (type);
2459 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2460 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2461 tree elttype = TREE_TYPE (type);
2462
2463 /* If the constructor has fewer fields than the structure,
2464 clear the whole structure first. Similarly if this is a
2465 static constructor of a non-BLKmode object. */
2466
2467 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2468 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2469 clear_storage (target, int_size_in_bytes (type));
2470 else
2471 /* Inform later passes that the old value is dead. */
2472 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2473
2474 /* Store each element of the constructor into
2475 the corresponding element of TARGET, determined
2476 by counting the elements. */
2477 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2478 elt;
2479 elt = TREE_CHAIN (elt), i++)
2480 {
2481 register enum machine_mode mode;
2482 int bitsize;
2483 int bitpos;
2484 int unsignedp;
2485
2486 mode = TYPE_MODE (elttype);
2487 bitsize = GET_MODE_BITSIZE (mode);
2488 unsignedp = TREE_UNSIGNED (elttype);
2489
2490 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2491
2492 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2493 /* The alignment of TARGET is
2494 at least what its type requires. */
2495 VOIDmode, 0,
2496 TYPE_ALIGN (type) / BITS_PER_UNIT,
2497 int_size_in_bytes (type));
2498 }
2499 }
2500
2501 else
2502 abort ();
2503}
2504
2505/* Store the value of EXP (an expression tree)
2506 into a subfield of TARGET which has mode MODE and occupies
2507 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2508 If MODE is VOIDmode, it means that we are storing into a bit-field.
2509
2510 If VALUE_MODE is VOIDmode, return nothing in particular.
2511 UNSIGNEDP is not used in this case.
2512
2513 Otherwise, return an rtx for the value stored. This rtx
2514 has mode VALUE_MODE if that is convenient to do.
2515 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2516
2517 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2518 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2519
2520static rtx
2521store_field (target, bitsize, bitpos, mode, exp, value_mode,
2522 unsignedp, align, total_size)
2523 rtx target;
2524 int bitsize, bitpos;
2525 enum machine_mode mode;
2526 tree exp;
2527 enum machine_mode value_mode;
2528 int unsignedp;
2529 int align;
2530 int total_size;
2531{
2532 HOST_WIDE_INT width_mask = 0;
2533
2534 if (bitsize < HOST_BITS_PER_WIDE_INT)
2535 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
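  /* For example, a bitsize of 3 gives a width_mask of binary 111; it is
     used below to mask the value just stored so that a refetch from the
     bit-field can be avoided.  */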
2536
2537 /* If we are storing into an unaligned field of an aligned union that is
2538 in a register, we may have the mode of TARGET being an integer mode but
2539 MODE == BLKmode. In that case, get an aligned object whose size and
2540 alignment are the same as TARGET and store TARGET into it (we can avoid
2541 the store if the field being stored is the entire width of TARGET). Then
2542 call ourselves recursively to store the field into a BLKmode version of
2543 that object. Finally, load from the object into TARGET. This is not
2544 very efficient in general, but should only be slightly more expensive
2545 than the otherwise-required unaligned accesses. Perhaps this can be
2546 cleaned up later. */
2547
2548 if (mode == BLKmode
2549 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2550 {
2551 rtx object = assign_stack_temp (GET_MODE (target),
2552 GET_MODE_SIZE (GET_MODE (target)), 0);
2553 rtx blk_object = copy_rtx (object);
2554
2555 PUT_MODE (blk_object, BLKmode);
2556
2557 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2558 emit_move_insn (object, target);
2559
2560 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2561 align, total_size);
2562
2563 emit_move_insn (target, object);
2564
2565 return target;
2566 }
2567
2568 /* If the structure is in a register or if the component
2569 is a bit field, we cannot use addressing to access it.
2570 Use bit-field techniques or SUBREG to store in it. */
2571
2572 if (mode == VOIDmode
2573 || (mode != BLKmode && ! direct_store[(int) mode])
2574 || GET_CODE (target) == REG
2575 || GET_CODE (target) == SUBREG)
2576 {
2577 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2578 /* Store the value in the bitfield. */
2579 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2580 if (value_mode != VOIDmode)
2581 {
2582 /* The caller wants an rtx for the value. */
2583 /* If possible, avoid refetching from the bitfield itself. */
2584 if (width_mask != 0
2585 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2586 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2587 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2588 NULL_RTX, value_mode, 0, align,
2589 total_size);
2590 }
2591 return const0_rtx;
2592 }
2593 else
2594 {
2595 rtx addr = XEXP (target, 0);
2596 rtx to_rtx;
2597
2598 /* If a value is wanted, it must be the lhs;
2599 so make the address stable for multiple use. */
2600
2601 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2602 && ! CONSTANT_ADDRESS_P (addr)
2603 /* A frame-pointer reference is already stable. */
2604 && ! (GET_CODE (addr) == PLUS
2605 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2606 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2607 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2608 addr = copy_to_reg (addr);
2609
2610 /* Now build a reference to just the desired component. */
2611
2612 to_rtx = change_address (target, mode,
2613 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2614 MEM_IN_STRUCT_P (to_rtx) = 1;
2615
2616 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2617 }
2618}
2619\f
2620/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2621 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2622 ARRAY_REFs at constant positions and find the ultimate containing object,
2623 which we return.
2624
2625 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2626 bit position, and *PUNSIGNEDP to the signedness of the field.
2627 If the position of the field is variable, we store a tree
2628 giving the variable offset (in units) in *POFFSET.
2629 This offset is in addition to the bit position.
2630 If the position is not variable, we store 0 in *POFFSET.
2631
2632 If any of the extraction expressions is volatile,
2633 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2634
2635 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2636 is a mode that can be used to access the field. In that case, *PBITSIZE
2637 is redundant.
2638
2639 If the field describes a variable-sized object, *PMODE is set to
2640 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2641 this case, but the address of the object can be found. */
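/* Illustrative sketch: for a reference such as S.A[2].B in which every
   position involved is constant, this returns the tree for S, with
   *PBITSIZE the width of B, *PBITPOS the summed field and array offsets
   in bits, and *POFFSET 0.  */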
2642
2643tree
2644get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2645 tree exp;
2646 int *pbitsize;
2647 int *pbitpos;
2648 tree *poffset;
2649 enum machine_mode *pmode;
2650 int *punsignedp;
2651 int *pvolatilep;
2652{
2653 tree size_tree = 0;
2654 enum machine_mode mode = VOIDmode;
2655 tree offset = 0;
2656
2657 if (TREE_CODE (exp) == COMPONENT_REF)
2658 {
2659 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2660 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2661 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2662 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2663 }
2664 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2665 {
2666 size_tree = TREE_OPERAND (exp, 1);
2667 *punsignedp = TREE_UNSIGNED (exp);
2668 }
2669 else
2670 {
2671 mode = TYPE_MODE (TREE_TYPE (exp));
2672 *pbitsize = GET_MODE_BITSIZE (mode);
2673 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2674 }
2675
2676 if (size_tree)
2677 {
2678 if (TREE_CODE (size_tree) != INTEGER_CST)
2679 mode = BLKmode, *pbitsize = -1;
2680 else
2681 *pbitsize = TREE_INT_CST_LOW (size_tree);
2682 }
2683
2684 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2685 and find the ultimate containing object. */
2686
2687 *pbitpos = 0;
2688
2689 while (1)
2690 {
2691 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2692 {
2693 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2694 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2695 : TREE_OPERAND (exp, 2));
2696
2697 if (TREE_CODE (pos) == PLUS_EXPR)
2698 {
2699 tree constant, var;
2700 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2701 {
2702 constant = TREE_OPERAND (pos, 0);
2703 var = TREE_OPERAND (pos, 1);
2704 }
2705 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2706 {
2707 constant = TREE_OPERAND (pos, 1);
2708 var = TREE_OPERAND (pos, 0);
2709 }
2710 else
2711 abort ();
2712 *pbitpos += TREE_INT_CST_LOW (constant);
2713 if (offset)
2714 offset = size_binop (PLUS_EXPR, offset,
2715 size_binop (FLOOR_DIV_EXPR, var,
2716 size_int (BITS_PER_UNIT)));
2717 else
2718 offset = size_binop (FLOOR_DIV_EXPR, var,
2719 size_int (BITS_PER_UNIT));
2720 }
2721 else if (TREE_CODE (pos) == INTEGER_CST)
2722 *pbitpos += TREE_INT_CST_LOW (pos);
2723 else
2724 {
2725 /* Assume here that the offset is a multiple of a unit.
2726 If not, there should be an explicitly added constant. */
2727 if (offset)
2728 offset = size_binop (PLUS_EXPR, offset,
2729 size_binop (FLOOR_DIV_EXPR, pos,
2730 size_int (BITS_PER_UNIT)));
2731 else
2732 offset = size_binop (FLOOR_DIV_EXPR, pos,
2733 size_int (BITS_PER_UNIT));
2734 }
2735 }
2736
2737 else if (TREE_CODE (exp) == ARRAY_REF
2738 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2739 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2740 {
2741 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2742 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2743 }
2744 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2745 && ! ((TREE_CODE (exp) == NOP_EXPR
2746 || TREE_CODE (exp) == CONVERT_EXPR)
2747 && (TYPE_MODE (TREE_TYPE (exp))
2748 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2749 break;
2750
2751 /* If any reference in the chain is volatile, the effect is volatile. */
2752 if (TREE_THIS_VOLATILE (exp))
2753 *pvolatilep = 1;
2754 exp = TREE_OPERAND (exp, 0);
2755 }
2756
2757 /* If this was a bit-field, see if there is a mode that allows direct
2758 access in case EXP is in memory. */
2759 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2760 {
2761 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2762 if (mode == BLKmode)
2763 mode = VOIDmode;
2764 }
2765
2766 *pmode = mode;
2767 *poffset = offset;
2768#if 0
2769 /* We aren't finished fixing the callers to really handle nonzero offset. */
2770 if (offset != 0)
2771 abort ();
2772#endif
2773
2774 return exp;
2775}
2776\f
2777/* Given an rtx VALUE that may contain additions and multiplications,
2778 return an equivalent value that just refers to a register or memory.
2779 This is done by generating instructions to perform the arithmetic
2780 and returning a pseudo-register containing the value. */
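/* Illustrative sketch: given VALUE = (plus:SI (reg:SI 70) (const_int 4)),
   force_operand emits the addition and returns a pseudo register holding
   the sum; a VALUE that is already just a register or memory reference is
   returned unchanged.  */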
2781
2782rtx
2783force_operand (value, target)
2784 rtx value, target;
2785{
2786 register optab binoptab = 0;
2787 /* Use a temporary to force order of execution of calls to
2788 `force_operand'. */
2789 rtx tmp;
2790 register rtx op2;
2791 /* Use subtarget as the target for operand 0 of a binary operation. */
2792 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2793
2794 if (GET_CODE (value) == PLUS)
2795 binoptab = add_optab;
2796 else if (GET_CODE (value) == MINUS)
2797 binoptab = sub_optab;
2798 else if (GET_CODE (value) == MULT)
2799 {
2800 op2 = XEXP (value, 1);
2801 if (!CONSTANT_P (op2)
2802 && !(GET_CODE (op2) == REG && op2 != subtarget))
2803 subtarget = 0;
2804 tmp = force_operand (XEXP (value, 0), subtarget);
2805 return expand_mult (GET_MODE (value), tmp,
2806 force_operand (op2, NULL_RTX),
2807 target, 0);
2808 }
2809
2810 if (binoptab)
2811 {
2812 op2 = XEXP (value, 1);
2813 if (!CONSTANT_P (op2)
2814 && !(GET_CODE (op2) == REG && op2 != subtarget))
2815 subtarget = 0;
2816 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2817 {
2818 binoptab = add_optab;
2819 op2 = negate_rtx (GET_MODE (value), op2);
2820 }
2821
2822 /* Check for an addition with OP2 a constant integer and our first
2823 operand a PLUS of a virtual register and something else. In that
2824 case, we want to emit the sum of the virtual register and the
2825 constant first and then add the other value. This allows virtual
2826 register instantiation to simply modify the constant rather than
2827 creating another one around this addition. */
2828 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2829 && GET_CODE (XEXP (value, 0)) == PLUS
2830 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2831 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2832 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2833 {
2834 rtx temp = expand_binop (GET_MODE (value), binoptab,
2835 XEXP (XEXP (value, 0), 0), op2,
2836 subtarget, 0, OPTAB_LIB_WIDEN);
2837 return expand_binop (GET_MODE (value), binoptab, temp,
2838 force_operand (XEXP (XEXP (value, 0), 1), 0),
2839 target, 0, OPTAB_LIB_WIDEN);
2840 }
2841
2842 tmp = force_operand (XEXP (value, 0), subtarget);
2843 return expand_binop (GET_MODE (value), binoptab, tmp,
2844 force_operand (op2, NULL_RTX),
2845 target, 0, OPTAB_LIB_WIDEN);
2846 /* We give UNSIGNEDP = 0 to expand_binop
2847 because the only operations we are expanding here are signed ones. */
2848 }
2849 return value;
2850}
2851\f
2852/* Subroutine of expand_expr:
2853 save the non-copied parts (LIST) of an expr (LHS), and return a list
2854 which can restore these values to their previous values,
2855 should something modify their storage. */
2856
2857static tree
2858save_noncopied_parts (lhs, list)
2859 tree lhs;
2860 tree list;
2861{
2862 tree tail;
2863 tree parts = 0;
2864
2865 for (tail = list; tail; tail = TREE_CHAIN (tail))
2866 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2867 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2868 else
2869 {
2870 tree part = TREE_VALUE (tail);
2871 tree part_type = TREE_TYPE (part);
2872 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2873 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2874 int_size_in_bytes (part_type), 0);
2875 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2876 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2877 parts = tree_cons (to_be_saved,
2878 build (RTL_EXPR, part_type, NULL_TREE,
2879 (tree) target),
2880 parts);
2881 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2882 }
2883 return parts;
2884}
2885
2886/* Subroutine of expand_expr:
2887 record the non-copied parts (LIST) of an expr (LHS), and return a list
2888 which specifies the initial values of these parts. */
2889
2890static tree
2891init_noncopied_parts (lhs, list)
2892 tree lhs;
2893 tree list;
2894{
2895 tree tail;
2896 tree parts = 0;
2897
2898 for (tail = list; tail; tail = TREE_CHAIN (tail))
2899 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2900 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2901 else
2902 {
2903 tree part = TREE_VALUE (tail);
2904 tree part_type = TREE_TYPE (part);
2905 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2906 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2907 }
2908 return parts;
2909}
2910
2911/* Subroutine of expand_expr: return nonzero iff there is no way that
2912 EXP can reference X, which is being modified. */
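/* Illustrative sketch: if X is a MEM and EXP contains an INDIRECT_REF,
   the dereference might alias X, so the answer is 0; a constant EXP
   (class 'c') can never reference X, so the answer is 1.  */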
2913
2914static int
2915safe_from_p (x, exp)
2916 rtx x;
2917 tree exp;
2918{
2919 rtx exp_rtl = 0;
2920 int i, nops;
2921
2922 if (x == 0)
2923 return 1;
2924
2925 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2926 find the underlying pseudo. */
2927 if (GET_CODE (x) == SUBREG)
2928 {
2929 x = SUBREG_REG (x);
2930 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2931 return 0;
2932 }
2933
2934 /* If X is a location in the outgoing argument area, it is always safe. */
2935 if (GET_CODE (x) == MEM
2936 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2937 || (GET_CODE (XEXP (x, 0)) == PLUS
2938 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2939 return 1;
2940
2941 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2942 {
2943 case 'd':
2944 exp_rtl = DECL_RTL (exp);
2945 break;
2946
2947 case 'c':
2948 return 1;
2949
2950 case 'x':
2951 if (TREE_CODE (exp) == TREE_LIST)
2952 return ((TREE_VALUE (exp) == 0
2953 || safe_from_p (x, TREE_VALUE (exp)))
2954 && (TREE_CHAIN (exp) == 0
2955 || safe_from_p (x, TREE_CHAIN (exp))));
2956 else
2957 return 0;
2958
2959 case '1':
2960 return safe_from_p (x, TREE_OPERAND (exp, 0));
2961
2962 case '2':
2963 case '<':
2964 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2965 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2966
2967 case 'e':
2968 case 'r':
2969 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2970 the expression. If it is set, we conflict iff we are that rtx or
2971 both are in memory. Otherwise, we check all operands of the
2972 expression recursively. */
2973
2974 switch (TREE_CODE (exp))
2975 {
2976 case ADDR_EXPR:
2977 return staticp (TREE_OPERAND (exp, 0));
2978
2979 case INDIRECT_REF:
2980 if (GET_CODE (x) == MEM)
2981 return 0;
2982 break;
2983
2984 case CALL_EXPR:
2985 exp_rtl = CALL_EXPR_RTL (exp);
2986 if (exp_rtl == 0)
2987 {
2988 /* Assume that the call will clobber all hard registers and
2989 all of memory. */
2990 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2991 || GET_CODE (x) == MEM)
2992 return 0;
2993 }
2994
2995 break;
2996
2997 case RTL_EXPR:
2998 exp_rtl = RTL_EXPR_RTL (exp);
2999 if (exp_rtl == 0)
3000 /* We don't know what this can modify. */
3001 return 0;
3002
3003 break;
3004
3005 case WITH_CLEANUP_EXPR:
3006 exp_rtl = RTL_EXPR_RTL (exp);
3007 break;
3008
3009 case SAVE_EXPR:
3010 exp_rtl = SAVE_EXPR_RTL (exp);
3011 break;
3012
3013 case BIND_EXPR:
3014 /* The only operand we look at is operand 1. The rest aren't
3015 part of the expression. */
3016 return safe_from_p (x, TREE_OPERAND (exp, 1));
3017
3018 case METHOD_CALL_EXPR:
3019 /* This takes an rtx argument, but shouldn't appear here. */
3020 abort ();
3021 }
3022
3023 /* If we have an rtx, we do not need to scan our operands. */
3024 if (exp_rtl)
3025 break;
3026
3027 nops = tree_code_length[(int) TREE_CODE (exp)];
3028 for (i = 0; i < nops; i++)
3029 if (TREE_OPERAND (exp, i) != 0
3030 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3031 return 0;
3032 }
3033
3034 /* If we have an rtl, find any enclosed object. Then see if we conflict
3035 with it. */
3036 if (exp_rtl)
3037 {
3038 if (GET_CODE (exp_rtl) == SUBREG)
3039 {
3040 exp_rtl = SUBREG_REG (exp_rtl);
3041 if (GET_CODE (exp_rtl) == REG
3042 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3043 return 0;
3044 }
3045
3046 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3047 are memory and EXP is not readonly. */
3048 return ! (rtx_equal_p (x, exp_rtl)
3049 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3050 && ! TREE_READONLY (exp)));
3051 }
3052
3053 /* If we reach here, it is safe. */
3054 return 1;
3055}
3056
3057/* Subroutine of expand_expr: return nonzero iff EXP is an
3058 expression whose type is statically determinable. */
3059
3060static int
3061fixed_type_p (exp)
3062 tree exp;
3063{
3064 if (TREE_CODE (exp) == PARM_DECL
3065 || TREE_CODE (exp) == VAR_DECL
3066 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3067 || TREE_CODE (exp) == COMPONENT_REF
3068 || TREE_CODE (exp) == ARRAY_REF)
3069 return 1;
3070 return 0;
3071}
3072\f
3073/* expand_expr: generate code for computing expression EXP.
3074 An rtx for the computed value is returned. The value is never null.
3075 In the case of a void EXP, const0_rtx is returned.
3076
3077 The value may be stored in TARGET if TARGET is nonzero.
3078 TARGET is just a suggestion; callers must assume that
3079 the rtx returned may not be the same as TARGET.
3080
3081 If TARGET is CONST0_RTX, it means that the value will be ignored.
3082
3083 If TMODE is not VOIDmode, it suggests generating the
3084 result in mode TMODE. But this is done only when convenient.
3085 Otherwise, TMODE is ignored and the value generated in its natural mode.
3086 TMODE is just a suggestion; callers must assume that
3087 the rtx returned may not have mode TMODE.
3088
3089 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3090 with a constant address even if that address is not normally legitimate.
3091 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3092
3093 If MODIFIER is EXPAND_SUM then when EXP is an addition
3094 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3095 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3096 products as above, or REG or MEM, or constant.
3097 Ordinarily in such cases we would output mul or add instructions
3098 and then return a pseudo reg containing the sum.
3099
3100 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3101 it also marks a label as absolutely required (it can't be dead).
3102 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3103 This is used for outputting expressions used in initializers. */
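/* Illustrative sketch: with MODIFIER == EXPAND_SUM, expanding the C
   expression P + 4 may simply return

     (plus:SI (reg:SI 65) (const_int 4))

   rather than emitting an add insn, leaving the addition to be folded
   into an address by the caller.  */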
3104
3105rtx
3106expand_expr (exp, target, tmode, modifier)
3107 register tree exp;
3108 rtx target;
3109 enum machine_mode tmode;
3110 enum expand_modifier modifier;
3111{
3112 register rtx op0, op1, temp;
3113 tree type = TREE_TYPE (exp);
3114 int unsignedp = TREE_UNSIGNED (type);
3115 register enum machine_mode mode = TYPE_MODE (type);
3116 register enum tree_code code = TREE_CODE (exp);
3117 optab this_optab;
3118 /* Use subtarget as the target for operand 0 of a binary operation. */
3119 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3120 rtx original_target = target;
3121 int ignore = target == const0_rtx;
3122 tree context;
3123
3124 /* Don't use hard regs as subtargets, because the combiner
3125 can only handle pseudo regs. */
3126 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3127 subtarget = 0;
3128 /* Avoid subtargets inside loops,
3129 since they hide some invariant expressions. */
3130 if (preserve_subexpressions_p ())
3131 subtarget = 0;
3132
3133 if (ignore) target = 0, original_target = 0;
3134
3135 /* If will do cse, generate all results into pseudo registers
3136 since 1) that allows cse to find more things
3137 and 2) otherwise cse could produce an insn the machine
3138 cannot support. */
3139
3140 if (! cse_not_expected && mode != BLKmode && target
3141 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3142 target = subtarget;
3143
3144 /* Ensure we reference a volatile object even if value is ignored. */
3145 if (ignore && TREE_THIS_VOLATILE (exp)
3146 && mode != VOIDmode && mode != BLKmode)
3147 {
3148 target = gen_reg_rtx (mode);
3149 temp = expand_expr (exp, target, VOIDmode, modifier);
3150 if (temp != target)
3151 emit_move_insn (target, temp);
3152 return target;
3153 }
3154
3155 switch (code)
3156 {
3157 case LABEL_DECL:
3158 {
3159 tree function = decl_function_context (exp);
3160 /* Handle using a label in a containing function. */
3161 if (function != current_function_decl && function != 0)
3162 {
3163 struct function *p = find_function_data (function);
3164 /* Allocate in the memory associated with the function
3165 that the label is in. */
3166 push_obstacks (p->function_obstack,
3167 p->function_maybepermanent_obstack);
3168
3169 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3170 label_rtx (exp), p->forced_labels);
3171 pop_obstacks ();
3172 }
3173 else if (modifier == EXPAND_INITIALIZER)
3174 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3175 label_rtx (exp), forced_labels);
3176 temp = gen_rtx (MEM, FUNCTION_MODE,
3177 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3178 if (function != current_function_decl && function != 0)
3179 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3180 return temp;
3181 }
3182
3183 case PARM_DECL:
3184 if (DECL_RTL (exp) == 0)
3185 {
3186 error_with_decl (exp, "prior parameter's size depends on `%s'");
3187 return CONST0_RTX (mode);
3188 }
3189
3190 case FUNCTION_DECL:
3191 case VAR_DECL:
3192 case RESULT_DECL:
3193 if (DECL_RTL (exp) == 0)
3194 abort ();
3195 /* Ensure variable marked as used
3196 even if it doesn't go through a parser. */
3197 TREE_USED (exp) = 1;
3198 /* Handle variables inherited from containing functions. */
3199 context = decl_function_context (exp);
3200
3201 /* We treat inline_function_decl as an alias for the current function
3202 because that is the inline function whose vars, types, etc.
3203 are being merged into the current function.
3204 See expand_inline_function. */
3205 if (context != 0 && context != current_function_decl
3206 && context != inline_function_decl
3207 /* If var is static, we don't need a static chain to access it. */
3208 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3209 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3210 {
3211 rtx addr;
3212
3213 /* Mark as non-local and addressable. */
3214 DECL_NONLOCAL (exp) = 1;
3215 mark_addressable (exp);
3216 if (GET_CODE (DECL_RTL (exp)) != MEM)
3217 abort ();
3218 addr = XEXP (DECL_RTL (exp), 0);
3219 if (GET_CODE (addr) == MEM)
3220 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3221 else
3222 addr = fix_lexical_addr (addr, exp);
3223 return change_address (DECL_RTL (exp), mode, addr);
3224 }
3225
3226 /* This is the case of an array whose size is to be determined
3227 from its initializer, while the initializer is still being parsed.
3228 See expand_decl. */
3229 if (GET_CODE (DECL_RTL (exp)) == MEM
3230 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3231 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3232 XEXP (DECL_RTL (exp), 0));
3233 if (GET_CODE (DECL_RTL (exp)) == MEM
3234 && modifier != EXPAND_CONST_ADDRESS
3235 && modifier != EXPAND_SUM
3236 && modifier != EXPAND_INITIALIZER)
3237 {
3238 /* DECL_RTL probably contains a constant address.
3239 On RISC machines where a constant address isn't valid,
3240 make some insns to get that address into a register. */
3241 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3242 || (flag_force_addr
3243 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3244 return change_address (DECL_RTL (exp), VOIDmode,
3245 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3246 }
3247 return DECL_RTL (exp);
3248
3249 case INTEGER_CST:
3250 return immed_double_const (TREE_INT_CST_LOW (exp),
3251 TREE_INT_CST_HIGH (exp),
3252 mode);
3253
3254 case CONST_DECL:
3255 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3256
3257 case REAL_CST:
3258 /* If optimized, generate immediate CONST_DOUBLE
3259 which will be turned into memory by reload if necessary.
3260
3261 We used to force a register so that loop.c could see it. But
3262 this does not allow gen_* patterns to perform optimizations with
3263 the constants. It also produces two insns in cases like "x = 1.0;".
3264 On most machines, floating-point constants are not permitted in
3265 many insns, so we'd end up copying it to a register in any case.
3266
3267 Now, we do the copying in expand_binop, if appropriate. */
3268 return immed_real_const (exp);
3269
3270 case COMPLEX_CST:
3271 case STRING_CST:
3272 if (! TREE_CST_RTL (exp))
3273 output_constant_def (exp);
3274
3275 /* TREE_CST_RTL probably contains a constant address.
3276 On RISC machines where a constant address isn't valid,
3277 make some insns to get that address into a register. */
3278 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3279 && modifier != EXPAND_CONST_ADDRESS
3280 && modifier != EXPAND_INITIALIZER
3281 && modifier != EXPAND_SUM
3282 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3283 return change_address (TREE_CST_RTL (exp), VOIDmode,
3284 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3285 return TREE_CST_RTL (exp);
3286
3287 case SAVE_EXPR:
3288 context = decl_function_context (exp);
3289 /* We treat inline_function_decl as an alias for the current function
3290 because that is the inline function whose vars, types, etc.
3291 are being merged into the current function.
3292 See expand_inline_function. */
3293 if (context == current_function_decl || context == inline_function_decl)
3294 context = 0;
3295
3296 /* If this is non-local, handle it. */
3297 if (context)
3298 {
3299 temp = SAVE_EXPR_RTL (exp);
3300 if (temp && GET_CODE (temp) == REG)
3301 {
3302 put_var_into_stack (exp);
3303 temp = SAVE_EXPR_RTL (exp);
3304 }
3305 if (temp == 0 || GET_CODE (temp) != MEM)
3306 abort ();
3307 return change_address (temp, mode,
3308 fix_lexical_addr (XEXP (temp, 0), exp));
3309 }
3310 if (SAVE_EXPR_RTL (exp) == 0)
3311 {
3312 if (mode == BLKmode)
3313 temp
3314 = assign_stack_temp (mode,
3315 int_size_in_bytes (TREE_TYPE (exp)), 0);
3316 else
3317 temp = gen_reg_rtx (mode);
3318 SAVE_EXPR_RTL (exp) = temp;
3319 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3320 if (!optimize && GET_CODE (temp) == REG)
3321 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3322 save_expr_regs);
3323 }
3324 return SAVE_EXPR_RTL (exp);
3325
3326 case EXIT_EXPR:
3327 /* Exit the current loop if the body-expression is true. */
3328 {
3329 rtx label = gen_label_rtx ();
3330 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3331 expand_exit_loop (NULL_PTR);
3332 emit_label (label);
3333 }
3334 return const0_rtx;
3335
3336 case LOOP_EXPR:
3337 expand_start_loop (1);
3338 expand_expr_stmt (TREE_OPERAND (exp, 0));
3339 expand_end_loop ();
3340
3341 return const0_rtx;
3342
3343 case BIND_EXPR:
3344 {
3345 tree vars = TREE_OPERAND (exp, 0);
3346 int vars_need_expansion = 0;
3347
3348 /* Need to open a binding contour here because
3349 if there are any cleanups they must be contained here. */
3350 expand_start_bindings (0);
3351
3352 /* Mark the corresponding BLOCK for output. */
3353 if (TREE_OPERAND (exp, 2) != 0)
3354 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3355
3356 /* If VARS have not yet been expanded, expand them now. */
3357 while (vars)
3358 {
3359 if (DECL_RTL (vars) == 0)
3360 {
3361 vars_need_expansion = 1;
3362 expand_decl (vars);
3363 }
3364 expand_decl_init (vars);
3365 vars = TREE_CHAIN (vars);
3366 }
3367
3368 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3369
3370 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3371
3372 return temp;
3373 }
3374
3375 case RTL_EXPR:
3376 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3377 abort ();
3378 emit_insns (RTL_EXPR_SEQUENCE (exp));
3379 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3380 return RTL_EXPR_RTL (exp);
3381
3382 case CONSTRUCTOR:
3383 /* All elts simple constants => refer to a constant in memory. But
3384 if this is a non-BLKmode mode, let it store a field at a time
3385 since that should make a CONST_INT or CONST_DOUBLE when we
3386 fold. */
3387 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3388 {
3389 rtx constructor = output_constant_def (exp);
3390 if (modifier != EXPAND_CONST_ADDRESS
3391 && modifier != EXPAND_INITIALIZER
3392 && modifier != EXPAND_SUM
3393 && !memory_address_p (GET_MODE (constructor),
3394 XEXP (constructor, 0)))
3395 constructor = change_address (constructor, VOIDmode,
3396 XEXP (constructor, 0));
3397 return constructor;
3398 }
3399
3400 if (ignore)
3401 {
3402 tree elt;
3403 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3404 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3405 return const0_rtx;
3406 }
3407 else
3408 {
3409 if (target == 0 || ! safe_from_p (target, exp))
3410 {
3411 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3412 target = gen_reg_rtx (mode);
3413 else
3414 {
3415 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3416 if (target)
3417 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3418 target = safe_target;
3419 }
3420 }
3421 store_constructor (exp, target);
3422 return target;
3423 }
3424
3425 case INDIRECT_REF:
3426 {
3427 tree exp1 = TREE_OPERAND (exp, 0);
3428 tree exp2;
3429
3430 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3431 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3432 This code has the same general effect as simply doing
3433 expand_expr on the save expr, except that the expression PTR
3434 is computed for use as a memory address. This means different
3435 code, suitable for indexing, may be generated. */
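	/* Illustrative sketch: for *p += 1 the front end wraps P in a
	   SAVE_EXPR; expanding its operand with EXPAND_SUM below lets the
	   saved address be a form such as (plus (reg) (const_int 8)),
	   which is better suited to indexing than a plain pseudo.  */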
3436 if (TREE_CODE (exp1) == SAVE_EXPR
3437 && SAVE_EXPR_RTL (exp1) == 0
3438 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3439 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3440 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3441 {
3442 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3443 VOIDmode, EXPAND_SUM);
3444 op0 = memory_address (mode, temp);
3445 op0 = copy_all_regs (op0);
3446 SAVE_EXPR_RTL (exp1) = op0;
3447 }
3448 else
3449 {
3450 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3451 op0 = memory_address (mode, op0);
3452 }
3453
3454 temp = gen_rtx (MEM, mode, op0);
3455 /* If address was computed by addition,
3456 mark this as an element of an aggregate. */
3457 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3458 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3459 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3460 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3461 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3462 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3463 || (TREE_CODE (exp1) == ADDR_EXPR
3464 && (exp2 = TREE_OPERAND (exp1, 0))
3465 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3466 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3467 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3468 MEM_IN_STRUCT_P (temp) = 1;
3469 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3470#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3471 a location is accessed through a pointer to const does not mean
3472 that the value there can never change. */
3473 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3474#endif
3475 return temp;
3476 }
3477
3478 case ARRAY_REF:
3479 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3480 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3481 {
3482 /* Nonconstant array index or nonconstant element size.
3483 Generate the tree for *(&array+index) and expand that,
3484 except do it in a language-independent way
3485 and don't complain about non-lvalue arrays.
3486 `mark_addressable' should already have been called
3487 for any array for which this case will be reached. */
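	  /* Illustrative sketch: for A[I] with a nonconstant I, the code
	     below builds the tree equivalent of

	       *(&a + i * sizeof (a[0]))

	     and expands that instead.  */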
3488
3489 /* Don't forget the const or volatile flag from the array element. */
3490 tree variant_type = build_type_variant (type,
3491 TREE_READONLY (exp),
3492 TREE_THIS_VOLATILE (exp));
3493 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3494 TREE_OPERAND (exp, 0));
3495 tree index = TREE_OPERAND (exp, 1);
3496 tree elt;
3497
3498 /* Convert the integer argument to a type the same size as a pointer
3499 so the multiply won't overflow spuriously. */
3500 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3501 index = convert (type_for_size (POINTER_SIZE, 0), index);
3502
3503 /* Don't think the address has side effects
3504 just because the array does.
3505 (In some cases the address might have side effects,
3506 and we fail to record that fact here. However, it should not
3507 matter, since expand_expr should not care.) */
3508 TREE_SIDE_EFFECTS (array_adr) = 0;
3509
3510 elt = build1 (INDIRECT_REF, type,
3511 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3512 array_adr,
3513 fold (build (MULT_EXPR,
3514 TYPE_POINTER_TO (variant_type),
3515 index, size_in_bytes (type))))));
3516
3517 /* Volatility, etc., of new expression is same as old expression. */
3518 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3519 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3520 TREE_READONLY (elt) = TREE_READONLY (exp);
3521
3522 return expand_expr (elt, target, tmode, modifier);
3523 }
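	/* Illustrative example (assumed, not from the original source):
	   with a nonconstant index,

	       int a[10], i;  ... a[i] ...

	   is expanded as if written *(&a + i * sizeof (int)), using the
	   ADDR_EXPR, MULT_EXPR and INDIRECT_REF built above.  */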
3524
3525 /* Fold an expression like: "foo"[2].
3526 This is not done in fold so it won't happen inside &. */
3527 {
3528 int i;
3529 tree arg0 = TREE_OPERAND (exp, 0);
3530 tree arg1 = TREE_OPERAND (exp, 1);
3531
3532 if (TREE_CODE (arg0) == STRING_CST
3533 && TREE_CODE (arg1) == INTEGER_CST
3534 && !TREE_INT_CST_HIGH (arg1)
3535 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3536 {
3537 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3538 {
3539 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3540 TREE_TYPE (exp) = integer_type_node;
3541 return expand_expr (exp, target, tmode, modifier);
3542 }
3543 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3544 {
3545 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3546 TREE_TYPE (exp) = integer_type_node;
3547 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3548 }
3549 }
3550 }
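      /* Illustrative example (assumed, not from the original source):
	 the block above folds

	     char c = "hello"[1];

	 directly to the character constant 'e', since both the string
	 and the index are constants.  */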
3551
3552 /* If this is a constant index into a constant array,
3553 just get the value from the array. Handle both the cases when
3554 we have an explicit constructor and when our operand is a variable
3555 that was declared const. */
3556
3557 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3558 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3559 {
3560 tree index = fold (TREE_OPERAND (exp, 1));
3561 if (TREE_CODE (index) == INTEGER_CST
3562 && TREE_INT_CST_HIGH (index) == 0)
3563 {
3564 int i = TREE_INT_CST_LOW (index);
3565 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3566
3567 while (elem && i--)
3568 elem = TREE_CHAIN (elem);
3569 if (elem)
3570 return expand_expr (fold (TREE_VALUE (elem)), target,
3571 tmode, modifier);
3572 }
3573 }
3574
3575 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3576 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3577 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3578 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3579 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3580 && optimize >= 1
3581 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3582 != ERROR_MARK))
3583 {
3584 tree index = fold (TREE_OPERAND (exp, 1));
3585 if (TREE_CODE (index) == INTEGER_CST
3586 && TREE_INT_CST_HIGH (index) == 0)
3587 {
3588 int i = TREE_INT_CST_LOW (index);
3589	    tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3590
3591 if (TREE_CODE (init) == CONSTRUCTOR)
3592 {
3593 tree elem = CONSTRUCTOR_ELTS (init);
3594
3595 while (elem && i--)
3596 elem = TREE_CHAIN (elem);
3597 if (elem)
3598 return expand_expr (fold (TREE_VALUE (elem)), target,
3599 tmode, modifier);
3600 }
3601 else if (TREE_CODE (init) == STRING_CST
3602 && i < TREE_STRING_LENGTH (init))
3603 {
3604		temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3605 return convert_to_mode (mode, temp, 0);
3606 }
3607 }
3608 }
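      /* Illustrative example (assumed, not from the original source):
	 when optimizing,

	     static const char tab[] = "abc";  ... tab[2] ...

	 is read out of DECL_INITIAL at compile time, yielding 'c' with
	 no run-time memory reference.  */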
3609 /* Treat array-ref with constant index as a component-ref. */
3610
3611 case COMPONENT_REF:
3612 case BIT_FIELD_REF:
3613 /* If the operand is a CONSTRUCTOR, we can just extract the
3614 appropriate field if it is present. */
3615 if (code != ARRAY_REF
3616 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3617 {
3618 tree elt;
3619
3620 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3621 elt = TREE_CHAIN (elt))
3622 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3623 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3624 }
3625
3626 {
3627 enum machine_mode mode1;
3628 int bitsize;
3629 int bitpos;
3630	tree offset;
3631	int volatilep = 0;
3632	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3633 &mode1, &unsignedp, &volatilep);
3634
3635 /* In some cases, we will be offsetting OP0's address by a constant.
3636 So get it as a sum, if possible. If we will be using it
3637 directly in an insn, we validate it. */
3638	op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3639
3640 /* If this is a constant, put it into a register if it is a
3641	   legitimate constant and memory if it isn't.  */
3642 if (CONSTANT_P (op0))
3643 {
3644 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3645 if (LEGITIMATE_CONSTANT_P (op0))
3646 op0 = force_reg (mode, op0);
3647 else
3648 op0 = validize_mem (force_const_mem (mode, op0));
3649 }
3650
3651 if (offset != 0)
3652 {
3653	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3654
3655 if (GET_CODE (op0) != MEM)
3656 abort ();
3657 op0 = change_address (op0, VOIDmode,
3658 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3659 force_reg (Pmode, offset_rtx)));
3660 }
3661
3662 /* Don't forget about volatility even if this is a bitfield. */
3663 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3664 {
3665 op0 = copy_rtx (op0);
3666 MEM_VOLATILE_P (op0) = 1;
3667 }
3668
3669 if (mode1 == VOIDmode
3670 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3671 && modifier != EXPAND_CONST_ADDRESS
3672 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3673 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3674 {
3675 /* In cases where an aligned union has an unaligned object
3676 as a field, we might be extracting a BLKmode value from
3677 an integer-mode (e.g., SImode) object. Handle this case
3678 by doing the extract into an object as wide as the field
3679 (which we know to be the width of a basic mode), then
3680 storing into memory, and changing the mode to BLKmode. */
3681 enum machine_mode ext_mode = mode;
3682
3683 if (ext_mode == BLKmode)
3684 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3685
3686 if (ext_mode == BLKmode)
3687 abort ();
3688
3689 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3690 unsignedp, target, ext_mode, ext_mode,
3691 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3692 int_size_in_bytes (TREE_TYPE (tem)));
3693 if (mode == BLKmode)
3694 {
3695 rtx new = assign_stack_temp (ext_mode,
3696 bitsize / BITS_PER_UNIT, 0);
3697
3698 emit_move_insn (new, op0);
3699 op0 = copy_rtx (new);
3700 PUT_MODE (op0, BLKmode);
3701 }
3702
3703 return op0;
3704 }
3705
3706 /* Get a reference to just this component. */
3707 if (modifier == EXPAND_CONST_ADDRESS
3708 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3709 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3710 (bitpos / BITS_PER_UNIT)));
3711 else
3712 op0 = change_address (op0, mode1,
3713 plus_constant (XEXP (op0, 0),
3714 (bitpos / BITS_PER_UNIT)));
3715 MEM_IN_STRUCT_P (op0) = 1;
3716 MEM_VOLATILE_P (op0) |= volatilep;
3717 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3718 return op0;
3719 if (target == 0)
3720 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3721 convert_move (target, op0, unsignedp);
3722 return target;
3723 }
3724
3725 case OFFSET_REF:
3726 {
3727 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3728 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3729	op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3730 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3731 MEM_IN_STRUCT_P (temp) = 1;
3732 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3733#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3734 a location is accessed through a pointer to const does not mean
3735 that the value there can never change. */
3736 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3737#endif
3738 return temp;
3739 }
3740
3741 /* Intended for a reference to a buffer of a file-object in Pascal.
3742 But it's not certain that a special tree code will really be
3743 necessary for these. INDIRECT_REF might work for them. */
3744 case BUFFER_REF:
3745 abort ();
3746
3747 case WITH_CLEANUP_EXPR:
3748 if (RTL_EXPR_RTL (exp) == 0)
3749 {
3750 RTL_EXPR_RTL (exp)
3751 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3752 cleanups_this_call
3753 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3754 /* That's it for this cleanup. */
3755 TREE_OPERAND (exp, 2) = 0;
3756 }
3757 return RTL_EXPR_RTL (exp);
3758
3759 case CALL_EXPR:
3760 /* Check for a built-in function. */
3761 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3762 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3763 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3764 return expand_builtin (exp, target, subtarget, tmode, ignore);
3765 /* If this call was expanded already by preexpand_calls,
3766 just return the result we got. */
3767 if (CALL_EXPR_RTL (exp) != 0)
3768 return CALL_EXPR_RTL (exp);
3769      return expand_call (exp, target, ignore);
3770
3771 case NON_LVALUE_EXPR:
3772 case NOP_EXPR:
3773 case CONVERT_EXPR:
3774 case REFERENCE_EXPR:
3775 if (TREE_CODE (type) == VOID_TYPE || ignore)
3776 {
3777 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3778 return const0_rtx;
3779 }
3780 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3781 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
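      /* The block below handles conversion to a union type (e.g., GCC's
	 cast-to-union extension).  Illustrative example (assumed, not
	 from the original source):

	     union u { int i; float f; };  ... (union u) 3.14f ...

	 stores the operand into the matching part of a fresh union
	 object and returns the whole union.  */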
3782 if (TREE_CODE (type) == UNION_TYPE)
3783 {
3784 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3785 if (target == 0)
3786 {
3787 if (mode == BLKmode)
3788 {
3789 if (TYPE_SIZE (type) == 0
3790 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3791 abort ();
3792 target = assign_stack_temp (BLKmode,
3793 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3794 + BITS_PER_UNIT - 1)
3795 / BITS_PER_UNIT, 0);
3796 }
3797 else
3798 target = gen_reg_rtx (mode);
3799 }
3800 if (GET_CODE (target) == MEM)
3801 /* Store data into beginning of memory target. */
3802 store_expr (TREE_OPERAND (exp, 0),
3803 change_address (target, TYPE_MODE (valtype), 0),
3804 NULL_RTX);
3805 else if (GET_CODE (target) == REG)
3806 /* Store this field into a union of the proper type. */
3807 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3808 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3809 VOIDmode, 0, 1,
3810 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3811 else
3812 abort ();
3813
3814 /* Return the entire union. */
3815 return target;
3816 }
3817      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3818 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3819 return op0;
3820 if (modifier == EXPAND_INITIALIZER)
3821 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3822 if (flag_force_mem && GET_CODE (op0) == MEM)
3823 op0 = copy_to_reg (op0);
3824
3825 if (target == 0)
3826 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3827 else
3828 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3829 return target;
3830
3831 case PLUS_EXPR:
3832 /* We come here from MINUS_EXPR when the second operand is a constant. */
3833 plus_expr:
3834 this_optab = add_optab;
3835
3836 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3837 something else, make sure we add the register to the constant and
3838 then to the other thing. This case can occur during strength
3839 reduction and doing it this way will produce better code if the
3840 frame pointer or argument pointer is eliminated.
3841
3842 fold-const.c will ensure that the constant is always in the inner
3843 PLUS_EXPR, so the only case we need to do anything about is if
3844 sp, ap, or fp is our second argument, in which case we must swap
3845 the innermost first argument and our second argument. */
3846
3847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3848 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3849 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3850 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3851 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3852 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3853 {
3854 tree t = TREE_OPERAND (exp, 1);
3855
3856 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3857 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3858 }
3859
3860 /* If the result is to be Pmode and we are adding an integer to
3861 something, we might be forming a constant. So try to use
3862 plus_constant. If it produces a sum and we can't accept it,
3863 use force_operand. This allows P = &ARR[const] to generate
3864 efficient code on machines where a SYMBOL_REF is not a valid
3865 address.
3866
3867 If this is an EXPAND_SUM call, always return the sum. */
3868 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3869	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3870 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3871 || mode == Pmode))
3872 {
3873 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3874 EXPAND_SUM);
3875 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3876 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3877 op1 = force_operand (op1, target);
3878 return op1;
3879 }
3880
3881 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3882 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3883 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3884 || mode == Pmode))
3885 {
3886 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3887 EXPAND_SUM);
3888 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3889 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3890 op0 = force_operand (op0, target);
3891 return op0;
3892 }
3893
3894 /* No sense saving up arithmetic to be done
3895 if it's all in the wrong mode to form part of an address.
3896 And force_operand won't know whether to sign-extend or
3897 zero-extend. */
3898 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3899 || mode != Pmode) goto binop;
3900
3901 preexpand_calls (exp);
3902 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3903 subtarget = 0;
3904
3905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3906      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3907
3908 /* Make sure any term that's a sum with a constant comes last. */
3909 if (GET_CODE (op0) == PLUS
3910 && CONSTANT_P (XEXP (op0, 1)))
3911 {
3912 temp = op0;
3913 op0 = op1;
3914 op1 = temp;
3915 }
3916 /* If adding to a sum including a constant,
3917 associate it to put the constant outside. */
3918 if (GET_CODE (op1) == PLUS
3919 && CONSTANT_P (XEXP (op1, 1)))
3920 {
3921 rtx constant_term = const0_rtx;
3922
3923 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3924 if (temp != 0)
3925 op0 = temp;
3926 /* Ensure that MULT comes first if there is one. */
3927 else if (GET_CODE (op0) == MULT)
3928 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3929 else
3930 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3931
3932 /* Let's also eliminate constants from op0 if possible. */
3933 op0 = eliminate_constant_term (op0, &constant_term);
3934
3935 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3936 their sum should be a constant. Form it into OP1, since the
3937 result we want will then be OP0 + OP1. */
3938
3939 temp = simplify_binary_operation (PLUS, mode, constant_term,
3940 XEXP (op1, 1));
3941 if (temp != 0)
3942 op1 = temp;
3943 else
3944 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3945 }
3946
3947 /* Put a constant term last and put a multiplication first. */
3948 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3949 temp = op1, op1 = op0, op0 = temp;
3950
3951 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3952 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
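      /* Illustrative example (assumed, not from the original source):
	 the reassociation above lets (x + 4) + (y + 8), expanded for an
	 address, become (x + y) + 12, keeping the constant where it can
	 be folded into an address displacement.  */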
3953
3954 case MINUS_EXPR:
3955 /* Handle difference of two symbolic constants,
3956 for the sake of an initializer. */
3957 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3958 && really_constant_p (TREE_OPERAND (exp, 0))
3959 && really_constant_p (TREE_OPERAND (exp, 1)))
3960 {
3961 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3962 VOIDmode, modifier);
3963 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3964 VOIDmode, modifier);
3965 return gen_rtx (MINUS, mode, op0, op1);
3966 }
3967 /* Convert A - const to A + (-const). */
3968 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3969 {
3970 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3971 fold (build1 (NEGATE_EXPR, type,
3972 TREE_OPERAND (exp, 1))));
3973 goto plus_expr;
3974 }
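      /* Illustrative example (assumed, not from the original source):
	 the rewrite above turns x - 5 into x + (-5), so the PLUS_EXPR
	 code can fold the constant into an address or reassociate it.  */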
3975 this_optab = sub_optab;
3976 goto binop;
3977
3978 case MULT_EXPR:
3979 preexpand_calls (exp);
3980 /* If first operand is constant, swap them.
3981 Thus the following special case checks need only
3982 check the second operand. */
3983 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3984 {
3985 register tree t1 = TREE_OPERAND (exp, 0);
3986 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3987 TREE_OPERAND (exp, 1) = t1;
3988 }
3989
3990 /* Attempt to return something suitable for generating an
3991 indexed address, for machines that support that. */
3992
3993 if (modifier == EXPAND_SUM && mode == Pmode
3994 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3995	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3996 {
3997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3998
3999 /* Apply distributive law if OP0 is x+c. */
4000 if (GET_CODE (op0) == PLUS
4001 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4002 return gen_rtx (PLUS, mode,
4003 gen_rtx (MULT, mode, XEXP (op0, 0),
4004 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4005 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4006 * INTVAL (XEXP (op0, 1))));
4007
4008 if (GET_CODE (op0) != REG)
4009	    op0 = force_operand (op0, NULL_RTX);
4010 if (GET_CODE (op0) != REG)
4011 op0 = copy_to_mode_reg (mode, op0);
4012
4013 return gen_rtx (MULT, mode, op0,
4014			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4015 }
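      /* Illustrative example (assumed, not from the original source):
	 the distributive law above rewrites (x + 4) * 8, when forming
	 an address, as x*8 + 32, matching base + index*scale + offset
	 addressing on machines that support it.  */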
4016
4017 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4018 subtarget = 0;
4019
4020 /* Check for multiplying things that have been extended
4021 from a narrower type. If this machine supports multiplying
4022 in that narrower type with a result in the desired type,
4023 do it that way, and avoid the explicit type-conversion. */
4024 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4025 && TREE_CODE (type) == INTEGER_TYPE
4026 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4027 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4028 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4029 && int_fits_type_p (TREE_OPERAND (exp, 1),
4030 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4031 /* Don't use a widening multiply if a shift will do. */
4032 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4033		    > HOST_BITS_PER_WIDE_INT)
4034 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4035 ||
4036 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4037 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4038 ==
4039 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4040 /* If both operands are extended, they must either both
4041 be zero-extended or both be sign-extended. */
4042 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4043 ==
4044 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4045 {
4046 enum machine_mode innermode
4047 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4048 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4049 ? umul_widen_optab : smul_widen_optab);
4050 if (mode == GET_MODE_WIDER_MODE (innermode)
4051 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4052 {
4053 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4054				 NULL_RTX, VOIDmode, 0);
4055	      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4056 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4057 VOIDmode, 0);
4058 else
4059 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4060				   NULL_RTX, VOIDmode, 0);
4061 goto binop2;
4062 }
4063 }
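      /* Illustrative example (assumed, not from the original source):
	 the test above lets

	     short a, b;  int c = (int) a * (int) b;

	 use a widening multiply (smul_widen_optab) directly on the
	 narrow operands instead of extending both to int first.  */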
4064 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4065      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4066 return expand_mult (mode, op0, op1, target, unsignedp);
4067
4068 case TRUNC_DIV_EXPR:
4069 case FLOOR_DIV_EXPR:
4070 case CEIL_DIV_EXPR:
4071 case ROUND_DIV_EXPR:
4072 case EXACT_DIV_EXPR:
4073 preexpand_calls (exp);
4074 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4075 subtarget = 0;
4076 /* Possible optimization: compute the dividend with EXPAND_SUM
4077 then if the divisor is constant can optimize the case
4078 where some terms of the dividend have coeffs divisible by it. */
4079 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4080      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4081 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4082
4083 case RDIV_EXPR:
4084 this_optab = flodiv_optab;
4085 goto binop;
4086
4087 case TRUNC_MOD_EXPR:
4088 case FLOOR_MOD_EXPR:
4089 case CEIL_MOD_EXPR:
4090 case ROUND_MOD_EXPR:
4091 preexpand_calls (exp);
4092 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4093 subtarget = 0;
4094 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4095      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4096 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4097
4098 case FIX_ROUND_EXPR:
4099 case FIX_FLOOR_EXPR:
4100 case FIX_CEIL_EXPR:
4101 abort (); /* Not used for C. */
4102
4103 case FIX_TRUNC_EXPR:
4104      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4105 if (target == 0)
4106 target = gen_reg_rtx (mode);
4107 expand_fix (target, op0, unsignedp);
4108 return target;
4109
4110 case FLOAT_EXPR:
4111      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4112 if (target == 0)
4113 target = gen_reg_rtx (mode);
4114 /* expand_float can't figure out what to do if FROM has VOIDmode.
4115 So give it the correct mode. With -O, cse will optimize this. */
4116 if (GET_MODE (op0) == VOIDmode)
4117 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4118 op0);
4119 expand_float (target, op0,
4120 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4121 return target;
4122
4123 case NEGATE_EXPR:
4124 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4125 temp = expand_unop (mode, neg_optab, op0, target, 0);
4126 if (temp == 0)
4127 abort ();
4128 return temp;
4129
4130 case ABS_EXPR:
4131 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4132
4133 /* Unsigned abs is simply the operand. Testing here means we don't
4134 risk generating incorrect code below. */
4135 if (TREE_UNSIGNED (type))
4136 return op0;
4137
4138 /* First try to do it with a special abs instruction. */
4139 temp = expand_unop (mode, abs_optab, op0, target, 0);
4140 if (temp != 0)
4141 return temp;
4142
4143 /* If this machine has expensive jumps, we can do integer absolute
4144 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4145 where W is the width of MODE. */
4146
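      /* Illustrative sketch (assumed, not from the original source),
	 for a 32-bit MODE:

	     t = x >> 31;            (arithmetic shift: 0 or -1)
	     result = (x ^ t) - t;   (identity if t == 0, negation if t == -1)

	 This avoids a conditional branch.  */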
4147 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4148 {
4149 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4150 size_int (GET_MODE_BITSIZE (mode) - 1),
4151				       NULL_RTX, 0);
4152
4153 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4154 OPTAB_LIB_WIDEN);
4155 if (temp != 0)
4156 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4157 OPTAB_LIB_WIDEN);
4158
4159 if (temp != 0)
4160 return temp;
4161 }
4162
4163 /* If that does not win, use conditional jump and negate. */
4164 target = original_target;
4165 temp = gen_label_rtx ();
4166 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4167 || (GET_CODE (target) == REG
4168 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4169 target = gen_reg_rtx (mode);
4170 emit_move_insn (target, op0);
4171 emit_cmp_insn (target,
4172 expand_expr (convert (type, integer_zero_node),
4173 NULL_RTX, VOIDmode, 0),
4174 GE, NULL_RTX, mode, 0, 0);
4175 NO_DEFER_POP;
4176 emit_jump_insn (gen_bge (temp));
4177 op0 = expand_unop (mode, neg_optab, target, target, 0);
4178 if (op0 != target)
4179 emit_move_insn (target, op0);
4180 emit_label (temp);
4181 OK_DEFER_POP;
4182 return target;
4183
4184 case MAX_EXPR:
4185 case MIN_EXPR:
4186 target = original_target;
4187 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4188 || (GET_CODE (target) == REG
4189 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4190 target = gen_reg_rtx (mode);
4191      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4192 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4193
4194 /* First try to do it with a special MIN or MAX instruction.
4195 If that does not win, use a conditional jump to select the proper
4196 value. */
4197 this_optab = (TREE_UNSIGNED (type)
4198 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4199 : (code == MIN_EXPR ? smin_optab : smax_optab));
4200
4201 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4202 OPTAB_WIDEN);
4203 if (temp != 0)
4204 return temp;
4205
4206 if (target != op0)
4207 emit_move_insn (target, op0);
4208 op0 = gen_label_rtx ();
4209 if (code == MAX_EXPR)
4210 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4211 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4212 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4213 else
4214 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4215 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4216 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4217 if (temp == const0_rtx)
4218 emit_move_insn (target, op1);
4219 else if (temp != const_true_rtx)
4220 {
4221 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4222 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4223 else
4224 abort ();
4225 emit_move_insn (target, op1);
4226 }
4227 emit_label (op0);
4228 return target;
4229
4230/* ??? Can optimize when the operand of this is a bitwise operation,
4231 by using a different bitwise operation. */
4232 case BIT_NOT_EXPR:
4233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4234 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4235 if (temp == 0)
4236 abort ();
4237 return temp;
4238
4239 case FFS_EXPR:
4240 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4241 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4242 if (temp == 0)
4243 abort ();
4244 return temp;
4245
4246/* ??? Can optimize bitwise operations with one arg constant.
4247 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4248 and (a bitwise1 b) bitwise2 b (etc)
4249   but that is probably not worthwhile.  */
4250
4251/* BIT_AND_EXPR is for bitwise anding.
4252 TRUTH_AND_EXPR is for anding two boolean values
4253 when we want in all cases to compute both of them.
4254 In general it is fastest to do TRUTH_AND_EXPR by
4255 computing both operands as actual zero-or-1 values
4256 and then bitwise anding. In cases where there cannot
4257 be any side effects, better code would be made by
4258 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4259 but the question is how to recognize those cases. */
4260
4261 case TRUTH_AND_EXPR:
4262 case BIT_AND_EXPR:
4263 this_optab = and_optab;
4264 goto binop;
4265
4266/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4267 case TRUTH_OR_EXPR:
4268 case BIT_IOR_EXPR:
4269 this_optab = ior_optab;
4270 goto binop;
4271
4272 case BIT_XOR_EXPR:
4273 this_optab = xor_optab;
4274 goto binop;
4275
4276 case LSHIFT_EXPR:
4277 case RSHIFT_EXPR:
4278 case LROTATE_EXPR:
4279 case RROTATE_EXPR:
4280 preexpand_calls (exp);
4281 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4282 subtarget = 0;
4283 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4284 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4285 unsignedp);
4286
4287/* Could determine the answer when only additive constants differ.
4288 Also, the addition of one can be handled by changing the condition. */
4289 case LT_EXPR:
4290 case LE_EXPR:
4291 case GT_EXPR:
4292 case GE_EXPR:
4293 case EQ_EXPR:
4294 case NE_EXPR:
4295 preexpand_calls (exp);
4296 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4297 if (temp != 0)
4298 return temp;
4299 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4300 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4301 && original_target
4302 && GET_CODE (original_target) == REG
4303 && (GET_MODE (original_target)
4304 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4305 {
4306 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4307 if (temp != original_target)
4308 temp = copy_to_reg (temp);
4309 op1 = gen_label_rtx ();
4310	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4311 GET_MODE (temp), unsignedp, 0);
4312 emit_jump_insn (gen_beq (op1));
4313 emit_move_insn (temp, const1_rtx);
4314 emit_label (op1);
4315 return temp;
4316 }
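      /* Illustrative example (assumed, not from the original source):
	 for  r = (foo != 0)  where R already has FOO's mode, the code
	 above loads FOO into R, branches past the store when it is
	 zero, and otherwise overwrites R with 1, avoiding a separate
	 store-flag sequence.  */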
4317 /* If no set-flag instruction, must generate a conditional
4318 store into a temporary variable. Drop through
4319 and handle this like && and ||. */
4320
4321 case TRUTH_ANDIF_EXPR:
4322 case TRUTH_ORIF_EXPR:
4323 if (target == 0 || ! safe_from_p (target, exp)
4324 /* Make sure we don't have a hard reg (such as function's return
4325 value) live across basic blocks, if not optimizing. */
4326 || (!optimize && GET_CODE (target) == REG
4327 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4328 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4329 emit_clr_insn (target);
4330 op1 = gen_label_rtx ();
4331 jumpifnot (exp, op1);
4332 emit_0_to_1_insn (target);
4333 emit_label (op1);
4334 return target;
4335
4336 case TRUTH_NOT_EXPR:
4337 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4338 /* The parser is careful to generate TRUTH_NOT_EXPR
4339 only with operands that are always zero or one. */
4340      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4341 target, 1, OPTAB_LIB_WIDEN);
4342 if (temp == 0)
4343 abort ();
4344 return temp;
4345
4346 case COMPOUND_EXPR:
4347 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4348 emit_queue ();
4349 return expand_expr (TREE_OPERAND (exp, 1),
4350 (ignore ? const0_rtx : target),
4351 VOIDmode, 0);
4352
4353 case COND_EXPR:
4354 {
4355 /* Note that COND_EXPRs whose type is a structure or union
4356 are required to be constructed to contain assignments of
4357 a temporary variable, so that we can evaluate them here
4358 for side effect only. If type is void, we must do likewise. */
4359
4360 /* If an arm of the branch requires a cleanup,
4361 only that cleanup is performed. */
4362
4363 tree singleton = 0;
4364 tree binary_op = 0, unary_op = 0;
4365 tree old_cleanups = cleanups_this_call;
4366 cleanups_this_call = 0;
4367
4368 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4369 convert it to our mode, if necessary. */
4370 if (integer_onep (TREE_OPERAND (exp, 1))
4371 && integer_zerop (TREE_OPERAND (exp, 2))
4372 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4373 {
4374 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4375 if (GET_MODE (op0) == mode)
4376 return op0;
4377 if (target == 0)
4378 target = gen_reg_rtx (mode);
4379 convert_move (target, op0, unsignedp);
4380 return target;
4381 }
4382
4383 /* If we are not to produce a result, we have no target. Otherwise,
4384 if a target was specified use it; it will not be used as an
4385 intermediate target unless it is safe. If no target, use a
4386 temporary. */
4387
4388 if (mode == VOIDmode || ignore)
4389 temp = 0;
4390 else if (original_target
4391 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4392 temp = original_target;
4393 else if (mode == BLKmode)
4394 {
4395 if (TYPE_SIZE (type) == 0
4396 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4397 abort ();
4398 temp = assign_stack_temp (BLKmode,
4399 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4400 + BITS_PER_UNIT - 1)
4401 / BITS_PER_UNIT, 0);
4402 }
4403 else
4404 temp = gen_reg_rtx (mode);
4405
4406 /* Check for X ? A + B : A. If we have this, we can copy
4407 A to the output and conditionally add B. Similarly for unary
4408 operations. Don't do this if X has side-effects because
4409 those side effects might affect A or B and the "?" operation is
4410 a sequence point in ANSI. (We test for side effects later.) */
4411
4412 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4413 && operand_equal_p (TREE_OPERAND (exp, 2),
4414 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4415 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4416 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4417 && operand_equal_p (TREE_OPERAND (exp, 1),
4418 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4419 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4420 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4421 && operand_equal_p (TREE_OPERAND (exp, 2),
4422 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4423 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4424 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4425 && operand_equal_p (TREE_OPERAND (exp, 1),
4426 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4427 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4428
4429 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4430 operation, do this as A + (X != 0). Similarly for other simple
4431 binary operators. */
4432 if (singleton && binary_op
4433 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4434 && (TREE_CODE (binary_op) == PLUS_EXPR
4435 || TREE_CODE (binary_op) == MINUS_EXPR
4436 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4437 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4438 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4439 && integer_onep (TREE_OPERAND (binary_op, 1))
4440 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4441 {
4442 rtx result;
4443 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4444 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4445 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4446 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4447 : and_optab);
4448
4449 /* If we had X ? A : A + 1, do this as A + (X == 0).
4450
4451 We have to invert the truth value here and then put it
4452 back later if do_store_flag fails. We cannot simply copy
4453 TREE_OPERAND (exp, 0) to another variable and modify that
4454 because invert_truthvalue can modify the tree pointed to
4455 by its argument. */
4456 if (singleton == TREE_OPERAND (exp, 1))
4457 TREE_OPERAND (exp, 0)
4458 = invert_truthvalue (TREE_OPERAND (exp, 0));
4459
4460 result = do_store_flag (TREE_OPERAND (exp, 0),
4461 (safe_from_p (temp, singleton)
4462 ? temp : NULL_RTX),
4463 mode, BRANCH_COST <= 1);
4464
4465 if (result)
4466 {
4467		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4468 return expand_binop (mode, boptab, op1, result, temp,
4469 unsignedp, OPTAB_LIB_WIDEN);
4470 }
4471 else if (singleton == TREE_OPERAND (exp, 1))
4472 TREE_OPERAND (exp, 0)
4473 = invert_truthvalue (TREE_OPERAND (exp, 0));
4474 }
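	  /* Illustrative example (assumed, not from the original
	     source): the transformation above expands

		 x ? a + 1 : a

	     as  a + (x != 0), replacing a branch with a store-flag
	     instruction and an add.  */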
4475
4476 NO_DEFER_POP;
4477 op0 = gen_label_rtx ();
4478
4479 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4480 {
4481 if (temp != 0)
4482 {
4483 /* If the target conflicts with the other operand of the
4484 binary op, we can't use it. Also, we can't use the target
4485 if it is a hard register, because evaluating the condition
4486 might clobber it. */
4487 if ((binary_op
4488 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4489 || (GET_CODE (temp) == REG
4490 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4491 temp = gen_reg_rtx (mode);
4492 store_expr (singleton, temp, 0);
4493 }
4494 else
4495 expand_expr (singleton,
4496 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4497 if (cleanups_this_call)
4498 {
4499 sorry ("aggregate value in COND_EXPR");
4500 cleanups_this_call = 0;
4501 }
4502 if (singleton == TREE_OPERAND (exp, 1))
4503 jumpif (TREE_OPERAND (exp, 0), op0);
4504 else
4505 jumpifnot (TREE_OPERAND (exp, 0), op0);
4506
4507 if (binary_op && temp == 0)
4508 /* Just touch the other operand. */
4509 expand_expr (TREE_OPERAND (binary_op, 1),
4510			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4511 else if (binary_op)
4512 store_expr (build (TREE_CODE (binary_op), type,
4513 make_tree (type, temp),
4514 TREE_OPERAND (binary_op, 1)),
4515 temp, 0);
4516 else
4517 store_expr (build1 (TREE_CODE (unary_op), type,
4518 make_tree (type, temp)),
4519 temp, 0);
4520 op1 = op0;
4521 }
4522#if 0
4523 /* This is now done in jump.c and is better done there because it
4524 produces shorter register lifetimes. */
4525
4526 /* Check for both possibilities either constants or variables
4527 in registers (but not the same as the target!). If so, can
4528 save branches by assigning one, branching, and assigning the
4529 other. */
4530 else if (temp && GET_MODE (temp) != BLKmode
4531 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4532 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4533 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4534 && DECL_RTL (TREE_OPERAND (exp, 1))
4535 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4536 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4537 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4538 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4539 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4540 && DECL_RTL (TREE_OPERAND (exp, 2))
4541 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4542 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4543 {
4544 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4545 temp = gen_reg_rtx (mode);
4546 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4547 jumpifnot (TREE_OPERAND (exp, 0), op0);
4548 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4549 op1 = op0;
4550 }
4551#endif
4552 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4553 comparison operator. If we have one of these cases, set the
4554 output to A, branch on A (cse will merge these two references),
4555 then set the output to FOO. */
4556 else if (temp
4557 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4558 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4559 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4560 TREE_OPERAND (exp, 1), 0)
4561 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4562 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4563 {
4564 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4565 temp = gen_reg_rtx (mode);
4566 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4567 jumpif (TREE_OPERAND (exp, 0), op0);
4568 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4569 op1 = op0;
4570 }
4571 else if (temp
4572 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4573 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4574 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4575 TREE_OPERAND (exp, 2), 0)
4576 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4577 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4578 {
4579 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4580 temp = gen_reg_rtx (mode);
4581 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4582 jumpifnot (TREE_OPERAND (exp, 0), op0);
4583 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4584 op1 = op0;
4585 }
4586 else
4587 {
4588 op1 = gen_label_rtx ();
4589 jumpifnot (TREE_OPERAND (exp, 0), op0);
4590 if (temp != 0)
4591 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4592 else
4593 expand_expr (TREE_OPERAND (exp, 1),
4594 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4595 if (cleanups_this_call)
4596 {
4597 sorry ("aggregate value in COND_EXPR");
4598 cleanups_this_call = 0;
4599 }
4600
4601 emit_queue ();
4602 emit_jump_insn (gen_jump (op1));
4603 emit_barrier ();
4604 emit_label (op0);
4605 if (temp != 0)
4606 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4607 else
4608 expand_expr (TREE_OPERAND (exp, 2),
4609 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4610 }
4611
4612 if (cleanups_this_call)
4613 {
4614 sorry ("aggregate value in COND_EXPR");
4615 cleanups_this_call = 0;
4616 }
4617
4618 emit_queue ();
4619 emit_label (op1);
4620 OK_DEFER_POP;
4621 cleanups_this_call = old_cleanups;
4622 return temp;
4623 }
4624
4625 case TARGET_EXPR:
4626 {
4627 /* Something needs to be initialized, but we didn't know
4628 where that thing was when building the tree. For example,
4629 it could be the return value of a function, or a parameter
4630	   to a function which is laid out on the stack, or a temporary
4631 variable which must be passed by reference.
4632
4633 We guarantee that the expression will either be constructed
4634 or copied into our original target. */
4635
4636 tree slot = TREE_OPERAND (exp, 0);
4637
4638 if (TREE_CODE (slot) != VAR_DECL)
4639 abort ();
4640
4641 if (target == 0)
4642 {
4643 if (DECL_RTL (slot) != 0)
4644 {
4645 target = DECL_RTL (slot);
4646 /* We have already expanded the slot, so don't do
4647 it again. (mrs) */
4648 return target;
4649 }
4650 else
4651 {
4652 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4653 /* All temp slots at this level must not conflict. */
4654 preserve_temp_slots (target);
4655 DECL_RTL (slot) = target;
4656 }
4657
4658#if 0
4659 /* I bet this needs to be done, and I bet that it needs to
4660 be above, inside the else clause. The reason is
4661 simple, how else is it going to get cleaned up? (mrs)
4662
4663	   The reason it probably did not work before, and was
4664	   commented out, is because this was re-expanding already
4665	   expanded target_exprs (target == 0 and DECL_RTL (slot)
4666	   != 0), also cleaning them up many times as well.  :-( */
4667
4668 /* Since SLOT is not known to the called function
4669 to belong to its stack frame, we must build an explicit
4670 cleanup. This case occurs when we must build up a reference
4671 to pass the reference as an argument. In this case,
4672 it is very likely that such a reference need not be
4673 built here. */
4674
4675 if (TREE_OPERAND (exp, 2) == 0)
4676 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4677 if (TREE_OPERAND (exp, 2))
4678 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4679 cleanups_this_call);
4680#endif
4681 }
4682 else
4683 {
4684 /* This case does occur, when expanding a parameter which
4685 needs to be constructed on the stack. The target
4686 is the actual stack address that we want to initialize.
4687 The function we call will perform the cleanup in this case. */
4688
4689 DECL_RTL (slot) = target;
4690 }
4691
4692 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4693 }
4694
4695 case INIT_EXPR:
4696 {
4697 tree lhs = TREE_OPERAND (exp, 0);
4698 tree rhs = TREE_OPERAND (exp, 1);
4699 tree noncopied_parts = 0;
4700 tree lhs_type = TREE_TYPE (lhs);
4701
4702 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4703 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4704 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4705 TYPE_NONCOPIED_PARTS (lhs_type));
4706 while (noncopied_parts != 0)
4707 {
4708 expand_assignment (TREE_VALUE (noncopied_parts),
4709 TREE_PURPOSE (noncopied_parts), 0, 0);
4710 noncopied_parts = TREE_CHAIN (noncopied_parts);
4711 }
4712 return temp;
4713 }
4714
4715 case MODIFY_EXPR:
4716 {
4717 /* If lhs is complex, expand calls in rhs before computing it.
4718 That's so we don't compute a pointer and save it over a call.
4719 If lhs is simple, compute it first so we can give it as a
4720 target if the rhs is just a call. This avoids an extra temp and copy
4721 and that prevents a partial-subsumption which makes bad code.
4722 Actually we could treat component_ref's of vars like vars. */
4723
4724 tree lhs = TREE_OPERAND (exp, 0);
4725 tree rhs = TREE_OPERAND (exp, 1);
4726 tree noncopied_parts = 0;
4727 tree lhs_type = TREE_TYPE (lhs);
4728
4729 temp = 0;
4730
4731 if (TREE_CODE (lhs) != VAR_DECL
4732 && TREE_CODE (lhs) != RESULT_DECL
4733 && TREE_CODE (lhs) != PARM_DECL)
4734 preexpand_calls (exp);
4735
4736 /* Check for |= or &= of a bitfield of size one into another bitfield
4737 of size 1. In this case, (unless we need the result of the
4738 assignment) we can do this more efficiently with a
4739 test followed by an assignment, if necessary.
4740
4741 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4742 things change so we do, this code should be enhanced to
4743 support it. */
4744 if (ignore
4745 && TREE_CODE (lhs) == COMPONENT_REF
4746 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4747 || TREE_CODE (rhs) == BIT_AND_EXPR)
4748 && TREE_OPERAND (rhs, 0) == lhs
4749 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4750 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4751 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4752 {
4753 rtx label = gen_label_rtx ();
4754
4755 do_jump (TREE_OPERAND (rhs, 1),
4756 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4757 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4758 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4759 (TREE_CODE (rhs) == BIT_IOR_EXPR
4760 ? integer_one_node
4761 : integer_zero_node)),
4762 0, 0);
4763	    do_pending_stack_adjust ();
4764 emit_label (label);
4765 return const0_rtx;
4766 }
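	/* Illustrative example (assumed, not from the original source):
	   when the result is not needed,

	       s.flag |= t.flag;   (both 1-bit fields)

	   becomes "if (t.flag) s.flag = 1;", a test and conditional
	   store instead of a read-modify-write of the destination.  */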
4767
4768 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4769 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4770 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4771 TYPE_NONCOPIED_PARTS (lhs_type));
4772
4773 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4774 while (noncopied_parts != 0)
4775 {
4776 expand_assignment (TREE_PURPOSE (noncopied_parts),
4777 TREE_VALUE (noncopied_parts), 0, 0);
4778 noncopied_parts = TREE_CHAIN (noncopied_parts);
4779 }
4780 return temp;
4781 }
4782
4783 case PREINCREMENT_EXPR:
4784 case PREDECREMENT_EXPR:
4785 return expand_increment (exp, 0);
4786
4787 case POSTINCREMENT_EXPR:
4788 case POSTDECREMENT_EXPR:
4789 /* Faster to treat as pre-increment if result is not used. */
4790 return expand_increment (exp, ! ignore);
4791
4792 case ADDR_EXPR:
4793 /* Are we taking the address of a nested function? */
4794 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4795 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4796 {
4797 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4798 op0 = force_operand (op0, target);
4799 }
4800 else
4801 {
4802	  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4803 (modifier == EXPAND_INITIALIZER
4804 ? modifier : EXPAND_CONST_ADDRESS));
4805 if (GET_CODE (op0) != MEM)
4806 abort ();
4807
4808 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4809 return XEXP (op0, 0);
4810 op0 = force_operand (XEXP (op0, 0), target);
4811 }
4812 if (flag_force_addr && GET_CODE (op0) != REG)
4813 return force_reg (Pmode, op0);
4814 return op0;
4815
4816 case ENTRY_VALUE_EXPR:
4817 abort ();
4818
4819 case ERROR_MARK:
4820 return const0_rtx;
4821
4822 default:
4823 return (*lang_expand_expr) (exp, target, tmode, modifier);
4824 }
4825
4826 /* Here to do an ordinary binary operator, generating an instruction
4827 from the optab already placed in `this_optab'. */
4828 binop:
4829 preexpand_calls (exp);
4830 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4831 subtarget = 0;
4832 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4833  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4834 binop2:
4835 temp = expand_binop (mode, this_optab, op0, op1, target,
4836 unsignedp, OPTAB_LIB_WIDEN);
4837 if (temp == 0)
4838 abort ();
4839 return temp;
4840}
4841\f
4842/* Return the alignment in bits of EXP, a pointer valued expression.
4843 But don't return more than MAX_ALIGN no matter what.
4844 The alignment returned is, by default, the alignment of the thing that
4845 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4846
4847 Otherwise, look at the expression to see if we can do better, i.e., if the
4848 expression is actually pointing at an object whose alignment is tighter. */
4849
4850static int
4851get_pointer_alignment (exp, max_align)
4852 tree exp;
4853 unsigned max_align;
4854{
4855 unsigned align, inner;
4856
4857 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4858 return 0;
4859
4860 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4861 align = MIN (align, max_align);
4862
4863 while (1)
4864 {
4865 switch (TREE_CODE (exp))
4866 {
4867 case NOP_EXPR:
4868 case CONVERT_EXPR:
4869 case NON_LVALUE_EXPR:
4870 exp = TREE_OPERAND (exp, 0);
4871 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4872 return align;
4873 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4874 inner = MIN (inner, max_align);
4875 align = MAX (align, inner);
4876 break;
4877
4878 case PLUS_EXPR:
4879 /* If sum of pointer + int, restrict our maximum alignment to that
4880 imposed by the integer. If not, we can't do any better than
4881 ALIGN. */
4882 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4883 return align;
4884
4885 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4886 & (max_align - 1))
4887 != 0)
4888 max_align >>= 1;
4889
4890 exp = TREE_OPERAND (exp, 0);
4891 break;
4892
4893 case ADDR_EXPR:
4894 /* See what we are pointing at and look at its alignment. */
4895 exp = TREE_OPERAND (exp, 0);
4896 if (TREE_CODE (exp) == FUNCTION_DECL)
4897 align = MAX (align, FUNCTION_BOUNDARY);
4898 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4899 align = MAX (align, DECL_ALIGN (exp));
4900#ifdef CONSTANT_ALIGNMENT
4901 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4902 align = CONSTANT_ALIGNMENT (exp, align);
4903#endif
4904 return MIN (align, max_align);
4905
4906 default:
4907 return align;
4908 }
4909 }
4910}
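/* Illustrative example (assumed, not from the original source): for
   (char *) &d + 2, with a double D aligned to 64 bits, the ADDR_EXPR
   case would report DECL_ALIGN (d) = 64, but the PLUS_EXPR case first
   shrinks MAX_ALIGN to 16, because an offset of 2 bytes only preserves
   16-bit alignment.  */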
4911\f
4912/* Return the tree node and offset if a given argument corresponds to
4913 a string constant. */
4914
4915static tree
4916string_constant (arg, ptr_offset)
4917 tree arg;
4918 tree *ptr_offset;
4919{
4920 STRIP_NOPS (arg);
4921
4922 if (TREE_CODE (arg) == ADDR_EXPR
4923 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4924 {
4925 *ptr_offset = integer_zero_node;
4926 return TREE_OPERAND (arg, 0);
4927 }
4928 else if (TREE_CODE (arg) == PLUS_EXPR)
4929 {
4930 tree arg0 = TREE_OPERAND (arg, 0);
4931 tree arg1 = TREE_OPERAND (arg, 1);
4932
4933 STRIP_NOPS (arg0);
4934 STRIP_NOPS (arg1);
4935
4936 if (TREE_CODE (arg0) == ADDR_EXPR
4937 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4938 {
4939 *ptr_offset = arg1;
4940 return TREE_OPERAND (arg0, 0);
4941 }
4942 else if (TREE_CODE (arg1) == ADDR_EXPR
4943 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4944 {
4945 *ptr_offset = arg0;
4946 return TREE_OPERAND (arg1, 0);
4947 }
4948 }
4949
4950 return 0;
4951}
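/* Illustrative example (assumed, not from the original source):
   string_constant recognizes both "abcdef" and "abcdef" + i, returning
   the STRING_CST and storing 0 or I, respectively, through PTR_OFFSET.  */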
4952
4953/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4954 way, because it could contain a zero byte in the middle.
4955 TREE_STRING_LENGTH is the size of the character array, not the string.
4956
4957 Unfortunately, string_constant can't access the values of const char
4958 arrays with initializers, so neither can we do so here. */
4959
4960static tree
4961c_strlen (src)
4962 tree src;
4963{
4964 tree offset_node;
4965 int offset, max;
4966 char *ptr;
4967
4968 src = string_constant (src, &offset_node);
4969 if (src == 0)
4970 return 0;
4971 max = TREE_STRING_LENGTH (src);
4972 ptr = TREE_STRING_POINTER (src);
4973 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4974 {
4975 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4976 compute the offset to the following null if we don't know where to
4977 start searching for it. */
4978 int i;
4979 for (i = 0; i < max; i++)
4980 if (ptr[i] == 0)
4981 return 0;
4982 /* We don't know the starting offset, but we do know that the string
4983 has no internal zero bytes. We can assume that the offset falls
4984 within the bounds of the string; otherwise, the programmer deserves
4985 what he gets. Subtract the offset from the length of the string,
4986 and return that. */
4987 /* This would perhaps not be valid if we were dealing with named
4988 arrays in addition to literal string constants. */
4989 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4990 }
4991
4992 /* We have a known offset into the string. Start searching there for
4993 a null character. */
4994 if (offset_node == 0)
4995 offset = 0;
4996 else
4997 {
4998 /* Did we get a long long offset? If so, punt. */
4999 if (TREE_INT_CST_HIGH (offset_node) != 0)
5000 return 0;
5001 offset = TREE_INT_CST_LOW (offset_node);
5002 }
5003 /* If the offset is known to be out of bounds, warn, and call strlen at
5004 runtime. */
5005 if (offset < 0 || offset > max)
5006 {
5007 warning ("offset outside bounds of constant string");
5008 return 0;
5009 }
5010 /* Use strlen to search for the first zero byte. Since any strings
5011 constructed with build_string will have nulls appended, we win even
5012 if we get handed something like (char[4])"abcd".
5013
5014 Since OFFSET is our starting index into the string, no further
5015 calculation is needed. */
5016 return size_int (strlen (ptr + offset));
5017}
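/* Illustrative examples (assumed, not from the original source):
   c_strlen on "hello" + 2 folds to 3; on "foo\0bar" + i with
   nonconstant I it returns 0 (meaning unknown), because of the
   interior null byte.  */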
5018\f
5019/* Expand an expression EXP that calls a built-in function,
5020 with result going to TARGET if that's convenient
5021 (and in mode MODE if that's convenient).
5022 SUBTARGET may be used as the target for computing one of EXP's operands.
5023 IGNORE is nonzero if the value is to be ignored. */
5024
5025static rtx
5026expand_builtin (exp, target, subtarget, mode, ignore)
5027 tree exp;
5028 rtx target;
5029 rtx subtarget;
5030 enum machine_mode mode;
5031 int ignore;
5032{
5033 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5034 tree arglist = TREE_OPERAND (exp, 1);
5035 rtx op0;
5036  rtx lab1, insns;
5037 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5038
5039 switch (DECL_FUNCTION_CODE (fndecl))
5040 {
5041 case BUILT_IN_ABS:
5042 case BUILT_IN_LABS:
5043 case BUILT_IN_FABS:
5044 /* build_function_call changes these into ABS_EXPR. */
5045 abort ();
5046
5047 case BUILT_IN_FSQRT:
5048 /* If not optimizing, call the library function. */
5049      if (! optimize)
5050 break;
5051
5052 if (arglist == 0
5053	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
5054	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5055	return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5056
5057 /* Stabilize and compute the argument. */
5058 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5059 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5060 {
5061 exp = copy_node (exp);
5062 arglist = copy_node (arglist);
5063 TREE_OPERAND (exp, 1) = arglist;
5064 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5065 }
5066      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5067
5068 /* Make a suitable register to place result in. */
5069 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5070
5071      emit_queue ();
5072      start_sequence ();
5073
5074      /* Compute sqrt into TARGET.
5075	 Set TARGET to wherever the result comes back.  */
5076      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5077			    sqrt_optab, op0, target, 0);
5078
5079      /* If we were unable to expand via the builtin, stop the
5080	 sequence (without outputting the insns) and break, causing
5081	 a call to the library function.  */
5082      if (target == 0)
5083	{
5084	  end_sequence ();
5085	  break;
5086	}
5087
5088 /* Check the results by default. But if flag_fast_math is turned on,
5089 then assume sqrt will always be called with valid arguments. */
5090
5091 if (! flag_fast_math)
5092 {
5093 /* Don't define the sqrt instructions
5094 if your machine is not IEEE. */
5095 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5096 abort ();
5097
5098 lab1 = gen_label_rtx ();
5099
5100 /* Test the result; if it is NaN, set errno=EDOM because
5101 the argument was not in the domain. */
5102 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5103 emit_jump_insn (gen_beq (lab1));
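	  /* (A NaN is the only value that compares unequal to itself,
	     so the branch to LAB1 is taken exactly when the result is
	     a genuine number and errno need not be set.)  */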
5104
5105#if TARGET_EDOM
5106 {
5107#ifdef GEN_ERRNO_RTX
5108 rtx errno_rtx = GEN_ERRNO_RTX;
5109#else
5110 rtx errno_rtx
5111 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5112#endif
5113
5114 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5115 }
5116#else
5117 /* We can't set errno=EDOM directly; let the library call do it.
5118 Pop the arguments right away in case the call gets deleted. */
5119 NO_DEFER_POP;
5120 expand_call (exp, target, 0);
5121 OK_DEFER_POP;
5122#endif
5123
5124 emit_label (lab1);
5125 }
5126
5127      /* Output the entire sequence.  */
5128 insns = get_insns ();
5129 end_sequence ();
5130 emit_insns (insns);
5131
5132 return target;
5133
5134 case BUILT_IN_SAVEREGS:
5135 /* Don't do __builtin_saveregs more than once in a function.
5136 Save the result of the first call and reuse it. */
5137 if (saveregs_value != 0)
5138 return saveregs_value;
5139 {
5140 /* When this function is called, it means that registers must be
5141 saved on entry to this function. So we migrate the
5142 call to the first insn of this function. */
5143 rtx temp;
5144 rtx seq;
5145 rtx valreg, saved_valreg;
5146
5147 /* Now really call the function. `expand_call' does not call
5148 expand_builtin, so there is no danger of infinite recursion here. */
5149 start_sequence ();
5150
5151#ifdef EXPAND_BUILTIN_SAVEREGS
5152 /* Do whatever the machine needs done in this case. */
5153 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5154#else
5155 /* The register where the function returns its value
5156 is likely to have something else in it, such as an argument.
5157 So preserve that register around the call. */
5158 if (value_mode != VOIDmode)
5159 {
5160 valreg = hard_libcall_value (value_mode);
5161 saved_valreg = gen_reg_rtx (value_mode);
5162 emit_move_insn (saved_valreg, valreg);
5163 }
5164
5165 /* Generate the call, putting the value in a pseudo. */
5166 temp = expand_call (exp, target, ignore);
5167
5168 if (value_mode != VOIDmode)
5169 emit_move_insn (valreg, saved_valreg);
5170#endif
5171
5172 seq = get_insns ();
5173 end_sequence ();
5174
5175 saveregs_value = temp;
5176
5177 /* This won't work inside a SEQUENCE--it really has to be
5178 at the start of the function. */
5179 if (in_sequence_p ())
5180 {
5181 /* Better to do this than to crash. */
5182 error ("`va_start' used within `({...})'");
5183 return temp;
5184 }
5185
5186 /* Put the sequence after the NOTE that starts the function. */
5187 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5188 return temp;
5189 }
5190
5191 /* __builtin_args_info (N) returns word N of the arg space info
5192 for the current function. The number and meanings of words
5193 is controlled by the definition of CUMULATIVE_ARGS. */
5194 case BUILT_IN_ARGS_INFO:
5195 {
5196 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5197 int i;
5198 int *word_ptr = (int *) &current_function_args_info;
5199	tree type, elts = NULL_TREE, result;
5200
5201 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5202 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5203 __FILE__, __LINE__);
5204
5205 if (arglist != 0)
5206 {
5207 tree arg = TREE_VALUE (arglist);
5208 if (TREE_CODE (arg) != INTEGER_CST)
5209 error ("argument of __builtin_args_info must be constant");
5210 else
5211 {
5212 int wordnum = TREE_INT_CST_LOW (arg);
5213
5214 if (wordnum < 0 || wordnum >= nwords)
5215 error ("argument of __builtin_args_info out of range");
5216 else
5217		return GEN_INT (word_ptr[wordnum]);
5218 }
5219 }
5220 else
5221 error ("missing argument in __builtin_args_info");
5222
5223 return const0_rtx;
5224
5225#if 0
5226 for (i = 0; i < nwords; i++)
5227	elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
5228
5229 type = build_array_type (integer_type_node,
5230 build_index_type (build_int_2 (nwords, 0)));
5231 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5232 TREE_CONSTANT (result) = 1;
5233 TREE_STATIC (result) = 1;
5234 result = build (INDIRECT_REF, build_pointer_type (type), result);
5235 TREE_CONSTANT (result) = 1;
5236      return expand_expr (result, NULL_RTX, VOIDmode, 0);
5237#endif
5238 }
5239
5240 /* Return the address of the first anonymous stack arg. */
5241 case BUILT_IN_NEXT_ARG:
5242 {
5243 tree fntype = TREE_TYPE (current_function_decl);
5244 if (!(TYPE_ARG_TYPES (fntype) != 0
5245 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5246 != void_type_node)))
5247 {
5248 error ("`va_start' used in function with fixed args");
5249 return const0_rtx;
5250 }
5251 }
5252
5253 return expand_binop (Pmode, add_optab,
5254 current_function_internal_arg_pointer,
5255 current_function_arg_offset_rtx,
5256			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
5257
5258 case BUILT_IN_CLASSIFY_TYPE:
5259 if (arglist != 0)
5260 {
5261 tree type = TREE_TYPE (TREE_VALUE (arglist));
5262 enum tree_code code = TREE_CODE (type);
5263 if (code == VOID_TYPE)
5264	    return GEN_INT (void_type_class);
5265	  if (code == INTEGER_TYPE)
5266	    return GEN_INT (integer_type_class);
5267	  if (code == CHAR_TYPE)
5268	    return GEN_INT (char_type_class);
5269	  if (code == ENUMERAL_TYPE)
5270	    return GEN_INT (enumeral_type_class);
5271	  if (code == BOOLEAN_TYPE)
5272	    return GEN_INT (boolean_type_class);
5273	  if (code == POINTER_TYPE)
5274	    return GEN_INT (pointer_type_class);
5275	  if (code == REFERENCE_TYPE)
5276	    return GEN_INT (reference_type_class);
5277	  if (code == OFFSET_TYPE)
5278	    return GEN_INT (offset_type_class);
5279	  if (code == REAL_TYPE)
5280	    return GEN_INT (real_type_class);
5281	  if (code == COMPLEX_TYPE)
5282	    return GEN_INT (complex_type_class);
5283	  if (code == FUNCTION_TYPE)
5284	    return GEN_INT (function_type_class);
5285	  if (code == METHOD_TYPE)
5286	    return GEN_INT (method_type_class);
5287	  if (code == RECORD_TYPE)
5288	    return GEN_INT (record_type_class);
5289	  if (code == UNION_TYPE)
5290	    return GEN_INT (union_type_class);
5291	  if (code == ARRAY_TYPE)
5292	    return GEN_INT (array_type_class);
5293	  if (code == STRING_TYPE)
5294	    return GEN_INT (string_type_class);
5295	  if (code == SET_TYPE)
5296	    return GEN_INT (set_type_class);
5297	  if (code == FILE_TYPE)
5298	    return GEN_INT (file_type_class);
5299	  if (code == LANG_TYPE)
5300	    return GEN_INT (lang_type_class);
5301	}
5302      return GEN_INT (no_type_class);
5303
5304 case BUILT_IN_CONSTANT_P:
5305 if (arglist == 0)
5306 return const0_rtx;
5307 else
5308	return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5309 ? const1_rtx : const0_rtx);
5310
5311 case BUILT_IN_FRAME_ADDRESS:
5312 /* The argument must be a nonnegative integer constant.
5313 It counts the number of frames to scan up the stack.
5314 The value is the address of that frame. */
5315 case BUILT_IN_RETURN_ADDRESS:
5316 /* The argument must be a nonnegative integer constant.
5317 It counts the number of frames to scan up the stack.
5318 The value is the return address saved in that frame. */
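      /* For example, a count of 0 yields the current function's own
	 return address, and a count of 1 the caller's.  */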
5319 if (arglist == 0)
5320 /* Warning about missing arg was already issued. */
5321 return const0_rtx;
5322 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5323 {
5324 error ("invalid arg to __builtin_return_address");
5325 return const0_rtx;
5326 }
5327 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5328 {
5329 error ("invalid arg to __builtin_return_address");
5330 return const0_rtx;
5331 }
5332 else
5333 {
5334 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5335 rtx tem = frame_pointer_rtx;
5336 int i;
5337
5338 /* Scan back COUNT frames to the specified frame. */
5339 for (i = 0; i < count; i++)
5340 {
5341 /* Assume the dynamic chain pointer is in the word that
5342 the frame address points to, unless otherwise specified. */
5343#ifdef DYNAMIC_CHAIN_ADDRESS
5344 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5345#endif
5346 tem = memory_address (Pmode, tem);
5347 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5348 }
5349
5350 /* For __builtin_frame_address, return what we've got. */
5351 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5352 return tem;
5353
5354	  /* For __builtin_return_address,
5355	     get the return address from that frame.  */
5356#ifdef RETURN_ADDR_RTX
5357 return RETURN_ADDR_RTX (count, tem);
5358#else
5359 tem = memory_address (Pmode,
5360 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5361 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5362#endif
5363 }
5364
5365 case BUILT_IN_ALLOCA:
5366 if (arglist == 0
5367 /* Arg could be non-integer if user redeclared this fcn wrong. */
5368 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5369 return const0_rtx;
5370 current_function_calls_alloca = 1;
5371 /* Compute the argument. */
5372      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5373
5374      /* Allocate the desired space.  */
5375      target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5376
5377      /* Record the new stack level for nonlocal gotos.  */
5378      if (nonlocal_goto_handler_slot != 0)
5379	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5380 return target;
5381
5382 case BUILT_IN_FFS:
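      /* ffs yields the 1-based index of the least significant set bit,
	 or 0 for a zero argument; e.g. ffs (0x18) is 4.  */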
5383 /* If not optimizing, call the library function. */
5384 if (!optimize)
5385 break;
5386
5387 if (arglist == 0
5388 /* Arg could be non-integer if user redeclared this fcn wrong. */
5389 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5390 return const0_rtx;
5391
5392 /* Compute the argument. */
5393 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5394 /* Compute ffs, into TARGET if possible.
5395 Set TARGET to wherever the result comes back. */
5396 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5397 ffs_optab, op0, target, 1);
5398 if (target == 0)
5399 abort ();
5400 return target;
5401
5402 case BUILT_IN_STRLEN:
5403 /* If not optimizing, call the library function. */
5404 if (!optimize)
5405 break;
5406
5407 if (arglist == 0
5408 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5409 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5410 return const0_rtx;
5411 else
5412 {
5413	  tree src = TREE_VALUE (arglist);
5414	  tree len = c_strlen (src);
5415
5416 int align
5417 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5418
5419 rtx result, src_rtx, char_rtx;
5420 enum machine_mode insn_mode = value_mode, char_mode;
5421 enum insn_code icode;
5422
5423 /* If the length is known, just return it. */
5424 if (len != 0)
5425 return expand_expr (len, target, mode, 0);
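	  /* (Thus `strlen ("hello")' reduces here to the constant 5,
	     with no call and no inline scan.)  */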
5426
5427 /* If SRC is not a pointer type, don't do this operation inline. */
5428 if (align == 0)
5429 break;
5430
5431 /* Call a function if we can't compute strlen in the right mode. */
5432
5433 while (insn_mode != VOIDmode)
5434 {
5435 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5436 if (icode != CODE_FOR_nothing)
5437 break;
5438
5439 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5440 }
5441	  if (insn_mode == VOIDmode)
5442	    break;
5443
5444 /* Make a place to write the result of the instruction. */
5445 result = target;
5446 if (! (result != 0
5447 && GET_CODE (result) == REG
5448 && GET_MODE (result) == insn_mode
5449 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5450 result = gen_reg_rtx (insn_mode);
5451
5452	  /* Make sure the operands are acceptable to the predicates.  */
5453
5454	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5455	    result = gen_reg_rtx (insn_mode);
5456
5457 src_rtx = memory_address (BLKmode,
5458				    expand_expr (src, NULL_RTX, Pmode,
5459						 EXPAND_NORMAL));
5460	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5461 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5462
5463 char_rtx = const0_rtx;
5464	  char_mode = insn_operand_mode[(int)icode][2];
5465	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5466	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5467
5468 emit_insn (GEN_FCN (icode) (result,
5469 gen_rtx (MEM, BLKmode, src_rtx),
5470				      char_rtx, GEN_INT (align)));
5471
5472 /* Return the value in the proper mode for this function. */
5473 if (GET_MODE (result) == value_mode)
5474 return result;
5475 else if (target != 0)
5476 {
5477 convert_move (target, result, 0);
5478 return target;
5479 }
5480 else
5481 return convert_to_mode (value_mode, result, 0);
5482 }
5483
5484 case BUILT_IN_STRCPY:
5485 /* If not optimizing, call the library function. */
5486 if (!optimize)
5487 break;
5488
5489 if (arglist == 0
5490 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5491 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5492 || TREE_CHAIN (arglist) == 0
5493 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5494 return const0_rtx;
5495 else
5496 {
5497 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5498
5499 if (len == 0)
5500 break;
5501
5502 len = size_binop (PLUS_EXPR, len, integer_one_node);
5503
5504	  chainon (arglist, build_tree_list (NULL_TREE, len));
5505 }
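	  /* In effect, `strcpy (d, "abc")' has now become
	     `memcpy (d, "abc", 4)': the constant length, counting the
	     terminating null, was appended to ARGLIST.  */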
5506
5507 /* Drops in. */
5508 case BUILT_IN_MEMCPY:
5509 /* If not optimizing, call the library function. */
5510 if (!optimize)
5511 break;
5512
5513 if (arglist == 0
5514 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5515 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5516 || TREE_CHAIN (arglist) == 0
5517 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5518 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5519 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5520 return const0_rtx;
5521 else
5522 {
5523 tree dest = TREE_VALUE (arglist);
5524 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5525 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5526
5527 int src_align
5528 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5529 int dest_align
5530 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5531 rtx dest_rtx;
5532
5533 /* If either SRC or DEST is not a pointer type, don't do
5534 this operation in-line. */
5535 if (src_align == 0 || dest_align == 0)
5536 {
5537 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5538 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5539 break;
5540 }
5541
5542	  dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5543
5544 /* Copy word part most expediently. */
5545 emit_block_move (gen_rtx (MEM, BLKmode,
5546 memory_address (BLKmode, dest_rtx)),
5547 gen_rtx (MEM, BLKmode,
5548 memory_address (BLKmode,
5549					     expand_expr (src, NULL_RTX,
5550							  Pmode,
5551							  EXPAND_NORMAL))),
5552			   expand_expr (len, NULL_RTX, VOIDmode, 0),
5553 MIN (src_align, dest_align));
5554 return dest_rtx;
5555 }
5556
5557/* These comparison functions need an instruction that returns an actual
5558 index. An ordinary compare that just sets the condition codes
5559 is not enough. */
5560#ifdef HAVE_cmpstrsi
5561 case BUILT_IN_STRCMP:
5562 /* If not optimizing, call the library function. */
5563 if (!optimize)
5564 break;
5565
5566 if (arglist == 0
5567 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5568 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5569 || TREE_CHAIN (arglist) == 0
5570 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5571 return const0_rtx;
5572 else if (!HAVE_cmpstrsi)
5573 break;
5574 {
5575 tree arg1 = TREE_VALUE (arglist);
5576 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5577 tree offset;
5578 tree len, len2;
5579
5580 len = c_strlen (arg1);
5581 if (len)
5582 len = size_binop (PLUS_EXPR, integer_one_node, len);
5583 len2 = c_strlen (arg2);
5584 if (len2)
5585 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5586
5587 /* If we don't have a constant length for the first, use the length
5588 of the second, if we know it. We don't require a constant for
5589 this case; some cost analysis could be done if both are available
5590 but neither is constant. For now, assume they're equally cheap.
5591
5592	 If both strings have constant lengths, use the smaller.  This
5593	 could arise if optimization results in strcmp being called with
5594	 two fixed strings, or if the code was machine-generated.  We should
5595	 add some code to the `memcmp' handler below to deal with such
5596	 situations, someday.  */
5597 if (!len || TREE_CODE (len) != INTEGER_CST)
5598 {
5599 if (len2)
5600 len = len2;
5601 else if (len == 0)
5602 break;
5603 }
5604 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5605 {
5606 if (tree_int_cst_lt (len2, len))
5607 len = len2;
5608 }
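	/* For example, comparing "abc" (length 4 counting the null)
	   against "de" (length 3) need only examine 3 bytes: any
	   difference shows up at or before the shorter terminator.  */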
5609
5610	chainon (arglist, build_tree_list (NULL_TREE, len));
5611 }
5612
5613 /* Drops in. */
5614 case BUILT_IN_MEMCMP:
5615 /* If not optimizing, call the library function. */
5616 if (!optimize)
5617 break;
5618
5619 if (arglist == 0
5620 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5621 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5622 || TREE_CHAIN (arglist) == 0
5623 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5624 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5625 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5626 return const0_rtx;
5627 else if (!HAVE_cmpstrsi)
5628 break;
5629 {
5630 tree arg1 = TREE_VALUE (arglist);
5631 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5632 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5633 rtx result;
5634
5635 int arg1_align
5636 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5637 int arg2_align
5638 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5639 enum machine_mode insn_mode
5640 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5641
5642 /* If we don't have POINTER_TYPE, call the function. */
5643 if (arg1_align == 0 || arg2_align == 0)
5644 {
5645 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5646 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5647 break;
5648 }
5649
5650 /* Make a place to write the result of the instruction. */
5651 result = target;
5652 if (! (result != 0
5653 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5654 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5655 result = gen_reg_rtx (insn_mode);
5656
5657 emit_insn (gen_cmpstrsi (result,
5658 gen_rtx (MEM, BLKmode,
5659 expand_expr (arg1, NULL_RTX, Pmode,
5660 EXPAND_NORMAL)),
5661			       gen_rtx (MEM, BLKmode,
5662 expand_expr (arg2, NULL_RTX, Pmode,
5663 EXPAND_NORMAL)),
5664 expand_expr (len, NULL_RTX, VOIDmode, 0),
5665 GEN_INT (MIN (arg1_align, arg2_align))));
5666
5667 /* Return the value in the proper mode for this function. */
5668 mode = TYPE_MODE (TREE_TYPE (exp));
5669 if (GET_MODE (result) == mode)
5670 return result;
5671 else if (target != 0)
5672 {
5673 convert_move (target, result, 0);
5674 return target;
5675 }
5676 else
5677 return convert_to_mode (mode, result, 0);
5678 }
5679#else
5680 case BUILT_IN_STRCMP:
5681 case BUILT_IN_MEMCMP:
5682 break;
5683#endif
5684
5685 default: /* just do library call, if unknown builtin */
5686 error ("built-in function %s not currently supported",
5687 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5688 }
5689
5690 /* The switch statement above can drop through to cause the function
5691 to be called normally. */
5692
5693 return expand_call (exp, target, ignore);
5694}
5695\f
5696/* Expand code for a post- or pre- increment or decrement
5697 and return the RTX for the result.
5698 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
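/* For example, `i++' in a value context comes here with POST == 1 and
   yields an rtx for the old value of `i', while `++i' comes with
   POST == 0 and yields the incremented value itself.  */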
5699
5700static rtx
5701expand_increment (exp, post)
5702 register tree exp;
5703 int post;
5704{
5705 register rtx op0, op1;
5706 register rtx temp, value;
5707 register tree incremented = TREE_OPERAND (exp, 0);
5708 optab this_optab = add_optab;
5709 int icode;
5710 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5711 int op0_is_copy = 0;
5712
5713 /* Stabilize any component ref that might need to be
5714 evaluated more than once below. */
5715 if (TREE_CODE (incremented) == BIT_FIELD_REF
5716 || (TREE_CODE (incremented) == COMPONENT_REF
5717 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5718 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5719 incremented = stabilize_reference (incremented);
5720
5721 /* Compute the operands as RTX.
5722 Note whether OP0 is the actual lvalue or a copy of it:
5723     I believe it is a copy iff it is a register or subreg
5724     and insns were generated in computing it.  */
5725  temp = get_last_insn ();
5726  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5727  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5728		 && temp != get_last_insn ());
5729  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5730
5731 /* Decide whether incrementing or decrementing. */
5732 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5733 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5734 this_optab = sub_optab;
5735
5736 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5737 then we cannot just increment OP0. We must
5738 therefore contrive to increment the original value.
5739 Then we can return OP0 since it is a copy of the old value. */
5740 if (op0_is_copy)
5741 {
5742 /* This is the easiest way to increment the value wherever it is.
5743 Problems with multiple evaluation of INCREMENTED
5744 are prevented because either (1) it is a component_ref,
5745 in which case it was stabilized above, or (2) it is an array_ref
5746 with constant index in an array in a register, which is
5747 safe to reevaluate. */
5748 tree newexp = build ((this_optab == add_optab
5749 ? PLUS_EXPR : MINUS_EXPR),
5750 TREE_TYPE (exp),
5751 incremented,
5752 TREE_OPERAND (exp, 1));
5753 temp = expand_assignment (incremented, newexp, ! post, 0);
5754 return post ? op0 : temp;
5755 }
5756
5757 /* Convert decrement by a constant into a negative increment. */
5758 if (this_optab == sub_optab
5759 && GET_CODE (op1) == CONST_INT)
5760 {
5761      op1 = GEN_INT (- INTVAL (op1));
5762 this_optab = add_optab;
5763 }
5764
5765 if (post)
5766 {
5767 /* We have a true reference to the value in OP0.
5768 If there is an insn to add or subtract in this mode, queue it. */
5769
5770#if 0 /* Turned off to avoid making extra insn for indexed memref. */
5771 op0 = stabilize (op0);
5772#endif
5773
5774 icode = (int) this_optab->handlers[(int) mode].insn_code;
5775 if (icode != (int) CODE_FOR_nothing
5776 /* Make sure that OP0 is valid for operands 0 and 1
5777 of the insn we want to queue. */
5778 && (*insn_operand_predicate[icode][0]) (op0, mode)
5779 && (*insn_operand_predicate[icode][1]) (op0, mode))
5780 {
5781 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5782 op1 = force_reg (mode, op1);
5783
5784 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5785 }
5786 }
5787
5788 /* Preincrement, or we can't increment with one simple insn. */
5789 if (post)
5790 /* Save a copy of the value before inc or dec, to return it later. */
5791 temp = value = copy_to_reg (op0);
5792 else
5793 /* Arrange to return the incremented value. */
5794 /* Copy the rtx because expand_binop will protect from the queue,
5795 and the results of that would be invalid for us to return
5796 if our caller does emit_queue before using our result. */
5797 temp = copy_rtx (value = op0);
5798
5799 /* Increment however we can. */
5800 op1 = expand_binop (mode, this_optab, value, op1, op0,
5801 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5802 /* Make sure the value is stored into OP0. */
5803 if (op1 != op0)
5804 emit_move_insn (op0, op1);
5805
5806 return temp;
5807}
5808\f
5809/* Expand all function calls contained within EXP, innermost ones first.
5810 But don't look within expressions that have sequence points.
5811 For each CALL_EXPR, record the rtx for its value
5812 in the CALL_EXPR_RTL field. */
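/* Thus for a tree like `f (x) + g (y)', both calls are expanded (and
   their pending stack adjustments settled) before the addition itself
   is expanded.  */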
5813
5814static void
5815preexpand_calls (exp)
5816 tree exp;
5817{
5818 register int nops, i;
5819 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5820
5821 if (! do_preexpand_calls)
5822 return;
5823
5824 /* Only expressions and references can contain calls. */
5825
5826 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5827 return;
5828
5829 switch (TREE_CODE (exp))
5830 {
5831 case CALL_EXPR:
5832 /* Do nothing if already expanded. */
5833 if (CALL_EXPR_RTL (exp) != 0)
5834 return;
5835
5836 /* Do nothing to built-in functions. */
5837 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5838 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5839 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5840	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5841 return;
5842
5843 case COMPOUND_EXPR:
5844 case COND_EXPR:
5845 case TRUTH_ANDIF_EXPR:
5846 case TRUTH_ORIF_EXPR:
5847 /* If we find one of these, then we can be sure
5848 the adjust will be done for it (since it makes jumps).
5849 Do it now, so that if this is inside an argument
5850 of a function, we don't get the stack adjustment
5851 after some other args have already been pushed. */
5852 do_pending_stack_adjust ();
5853 return;
5854
5855 case BLOCK:
5856 case RTL_EXPR:
5857 case WITH_CLEANUP_EXPR:
5858 return;
5859
5860 case SAVE_EXPR:
5861 if (SAVE_EXPR_RTL (exp) != 0)
5862 return;
5863 }
5864
5865 nops = tree_code_length[(int) TREE_CODE (exp)];
5866 for (i = 0; i < nops; i++)
5867 if (TREE_OPERAND (exp, i) != 0)
5868 {
5869 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5870 if (type == 'e' || type == '<' || type == '1' || type == '2'
5871 || type == 'r')
5872 preexpand_calls (TREE_OPERAND (exp, i));
5873 }
5874}
5875\f
5876/* At the start of a function, record that we have no previously-pushed
5877 arguments waiting to be popped. */
5878
5879void
5880init_pending_stack_adjust ()
5881{
5882 pending_stack_adjust = 0;
5883}
5884
5885/* When exiting from function, if safe, clear out any pending stack adjust
5886 so the adjustment won't get done. */
5887
5888void
5889clear_pending_stack_adjust ()
5890{
5891#ifdef EXIT_IGNORE_STACK
5892 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5893      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5894 && ! flag_inline_functions)
5895 pending_stack_adjust = 0;
5896#endif
5897}
5898
5899/* Pop any previously-pushed arguments that have not been popped yet. */
5900
5901void
5902do_pending_stack_adjust ()
5903{
5904 if (inhibit_defer_pop == 0)
5905 {
5906 if (pending_stack_adjust != 0)
5907	adjust_stack (GEN_INT (pending_stack_adjust));
5908 pending_stack_adjust = 0;
5909 }
5910}
5911
5912/* Expand all cleanups up to OLD_CLEANUPS.
5913 Needed here, and also for language-dependent calls. */
5914
5915void
5916expand_cleanups_to (old_cleanups)
5917 tree old_cleanups;
5918{
5919 while (cleanups_this_call != old_cleanups)
5920 {
5921      expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5922 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5923 }
5924}
5925\f
5926/* Expand conditional expressions. */
5927
5928/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5929 LABEL is an rtx of code CODE_LABEL, in this function and all the
5930 functions here. */
5931
5932void
5933jumpifnot (exp, label)
5934 tree exp;
5935 rtx label;
5936{
5937  do_jump (exp, label, NULL_RTX);
5938}
5939
5940/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5941
5942void
5943jumpif (exp, label)
5944 tree exp;
5945 rtx label;
5946{
5947  do_jump (exp, NULL_RTX, label);
5948}
5949
5950/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5951 the result is zero, or IF_TRUE_LABEL if the result is one.
5952 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5953 meaning fall through in that case.
5954
5955 do_jump always does any pending stack adjust except when it does not
5956 actually perform a jump. An example where there is no jump
5957 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5958
5959 This function is responsible for optimizing cases such as
5960 &&, || and comparison operators in EXP. */
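/* For example, `if (a && b)' reaches here as a TRUTH_ANDIF_EXPR:
   a jump on `a' to the false label is emitted first, and `b' is then
   tested only on the path where `a' was nonzero.  */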
5961
5962void
5963do_jump (exp, if_false_label, if_true_label)
5964 tree exp;
5965 rtx if_false_label, if_true_label;
5966{
5967 register enum tree_code code = TREE_CODE (exp);
5968 /* Some cases need to create a label to jump to
5969 in order to properly fall through.
5970 These cases set DROP_THROUGH_LABEL nonzero. */
5971 rtx drop_through_label = 0;
5972 rtx temp;
5973 rtx comparison = 0;
5974 int i;
5975 tree type;
5976
5977 emit_queue ();
5978
5979 switch (code)
5980 {
5981 case ERROR_MARK:
5982 break;
5983
5984 case INTEGER_CST:
5985 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5986 if (temp)
5987 emit_jump (temp);
5988 break;
5989
5990#if 0
5991 /* This is not true with #pragma weak */
5992 case ADDR_EXPR:
5993 /* The address of something can never be zero. */
5994 if (if_true_label)
5995 emit_jump (if_true_label);
5996 break;
5997#endif
5998
5999 case NOP_EXPR:
6000 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6001 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6002 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6003 goto normal;
6004 case CONVERT_EXPR:
6005 /* If we are narrowing the operand, we have to do the compare in the
6006 narrower mode. */
6007 if ((TYPE_PRECISION (TREE_TYPE (exp))
6008 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6009 goto normal;
6010 case NON_LVALUE_EXPR:
6011 case REFERENCE_EXPR:
6012 case ABS_EXPR:
6013 case NEGATE_EXPR:
6014 case LROTATE_EXPR:
6015 case RROTATE_EXPR:
6016 /* These cannot change zero->non-zero or vice versa. */
6017 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6018 break;
6019
6020#if 0
6021 /* This is never less insns than evaluating the PLUS_EXPR followed by
6022 a test and can be longer if the test is eliminated. */
6023 case PLUS_EXPR:
6024 /* Reduce to minus. */
6025 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6026 TREE_OPERAND (exp, 0),
6027 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6028 TREE_OPERAND (exp, 1))));
6029 /* Process as MINUS. */
6030#endif
6031
6032 case MINUS_EXPR:
6033 /* Non-zero iff operands of minus differ. */
6034 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6035 TREE_OPERAND (exp, 0),
6036 TREE_OPERAND (exp, 1)),
6037 NE, NE);
6038 break;
6039
6040 case BIT_AND_EXPR:
6041 /* If we are AND'ing with a small constant, do this comparison in the
6042 smallest type that fits. If the machine doesn't have comparisons
6043 that small, it will be converted back to the wider comparison.
6044 This helps if we are testing the sign bit of a narrower object.
6045 combine can't do this for us because it can't know whether a
6046 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6047
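      /* For example, `if (x & 0x80)' with an int X can become a single
	 QImode sign test on machines that have a QImode compare.  */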
6048      if (! SLOW_BYTE_ACCESS
6049	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6050	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6051	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6052	  && (type = type_for_size (i + 1, 1)) != 0
6053	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6054	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6055	      != CODE_FOR_nothing))
6056 {
6057 do_jump (convert (type, exp), if_false_label, if_true_label);
6058 break;
6059 }
6060 goto normal;
6061
6062 case TRUTH_NOT_EXPR:
6063 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6064 break;
6065
6066 case TRUTH_ANDIF_EXPR:
6067 if (if_false_label == 0)
6068 if_false_label = drop_through_label = gen_label_rtx ();
6069      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6070 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6071 break;
6072
6073 case TRUTH_ORIF_EXPR:
6074 if (if_true_label == 0)
6075 if_true_label = drop_through_label = gen_label_rtx ();
6076      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6077 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6078 break;
6079
6080 case COMPOUND_EXPR:
6081 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6082 free_temp_slots ();
6083 emit_queue ();
6084      do_pending_stack_adjust ();
6085 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6086 break;
6087
6088 case COMPONENT_REF:
6089 case BIT_FIELD_REF:
6090 case ARRAY_REF:
6091 {
6092 int bitsize, bitpos, unsignedp;
6093 enum machine_mode mode;
6094 tree type;
6095	tree offset;
6096 int volatilep = 0;
6097
6098 /* Get description of this reference. We don't actually care
6099 about the underlying object here. */
6100 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6101 &mode, &unsignedp, &volatilep);
6102
6103 type = type_for_size (bitsize, unsignedp);
6104 if (! SLOW_BYTE_ACCESS
6105 && type != 0 && bitsize >= 0
6106 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6107 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6108 != CODE_FOR_nothing))
6109 {
6110 do_jump (convert (type, exp), if_false_label, if_true_label);
6111 break;
6112 }
6113 goto normal;
6114 }
6115
6116 case COND_EXPR:
6117 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6118 if (integer_onep (TREE_OPERAND (exp, 1))
6119 && integer_zerop (TREE_OPERAND (exp, 2)))
6120 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6121
6122 else if (integer_zerop (TREE_OPERAND (exp, 1))
6123 && integer_onep (TREE_OPERAND (exp, 2)))
6124 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6125
6126 else
6127 {
6128 register rtx label1 = gen_label_rtx ();
6129 drop_through_label = gen_label_rtx ();
6130	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6131 /* Now the THEN-expression. */
6132 do_jump (TREE_OPERAND (exp, 1),
6133 if_false_label ? if_false_label : drop_through_label,
6134 if_true_label ? if_true_label : drop_through_label);
6135 /* In case the do_jump just above never jumps. */
6136 do_pending_stack_adjust ();
6137 emit_label (label1);
6138 /* Now the ELSE-expression. */
6139 do_jump (TREE_OPERAND (exp, 2),
6140 if_false_label ? if_false_label : drop_through_label,
6141 if_true_label ? if_true_label : drop_through_label);
6142 }
6143 break;
6144
6145 case EQ_EXPR:
6146 if (integer_zerop (TREE_OPERAND (exp, 1)))
6147 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6148 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6149 == MODE_INT)
6150 &&
6151 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6152 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6153 else
6154 comparison = compare (exp, EQ, EQ);
6155 break;
6156
6157 case NE_EXPR:
6158 if (integer_zerop (TREE_OPERAND (exp, 1)))
6159 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6160 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6161 == MODE_INT)
6162 &&
6163 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6164 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6165 else
6166 comparison = compare (exp, NE, NE);
6167 break;
6168
6169 case LT_EXPR:
6170 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6171 == MODE_INT)
6172 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6173 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6174 else
6175 comparison = compare (exp, LT, LTU);
6176 break;
6177
6178 case LE_EXPR:
6179 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6180 == MODE_INT)
6181 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6182 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6183 else
6184 comparison = compare (exp, LE, LEU);
6185 break;
6186
6187 case GT_EXPR:
6188 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6189 == MODE_INT)
6190 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6191 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6192 else
6193 comparison = compare (exp, GT, GTU);
6194 break;
6195
6196 case GE_EXPR:
6197 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6198 == MODE_INT)
6199 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6200 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6201 else
6202 comparison = compare (exp, GE, GEU);
6203 break;
6204
6205 default:
6206 normal:
6207      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6208#if 0
6209 /* This is not needed any more and causes poor code since it causes
6210 comparisons and tests from non-SI objects to have different code
6211 sequences. */
6212 /* Copy to register to avoid generating bad insns by cse
6213 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6214 if (!cse_not_expected && GET_CODE (temp) == MEM)
6215 temp = copy_to_reg (temp);
6216#endif
6217 do_pending_stack_adjust ();
6218 if (GET_CODE (temp) == CONST_INT)
6219 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6220 else if (GET_CODE (temp) == LABEL_REF)
6221 comparison = const_true_rtx;
6222 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6223 && !can_compare_p (GET_MODE (temp)))
6224 /* Note swapping the labels gives us not-equal. */
6225 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6226 else if (GET_MODE (temp) != VOIDmode)
6227 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6228 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6229 GET_MODE (temp), NULL_RTX, 0);
6230 else
6231 abort ();
6232 }
6233
6234 /* Do any postincrements in the expression that was tested. */
6235 emit_queue ();
6236
6237 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6238 straight into a conditional jump instruction as the jump condition.
6239 Otherwise, all the work has been done already. */
6240
6241 if (comparison == const_true_rtx)
6242 {
6243 if (if_true_label)
6244 emit_jump (if_true_label);
6245 }
6246 else if (comparison == const0_rtx)
6247 {
6248 if (if_false_label)
6249 emit_jump (if_false_label);
6250 }
6251 else if (comparison)
6252 do_jump_for_compare (comparison, if_false_label, if_true_label);
6253
6254 free_temp_slots ();
6255
6256 if (drop_through_label)
6257 {
6258 /* If do_jump produces code that might be jumped around,
6259 do any stack adjusts from that code, before the place
6260 where control merges in. */
6261 do_pending_stack_adjust ();
6262 emit_label (drop_through_label);
6263 }
6264}
6265\f
6266/* Given a comparison expression EXP for values too wide to be compared
6267 with one insn, test the comparison and jump to the appropriate label.
6268 The code of EXP is ignored; we always test GT if SWAP is 0,
6269 and LT if SWAP is 1. */
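/* For example, a signed DImode comparison on a 32-bit machine tests
   the high words first (signed), drops to the low words only when the
   high words are equal, and compares the low words unsigned.  */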
6270
6271static void
6272do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6273 tree exp;
6274 int swap;
6275 rtx if_false_label, if_true_label;
6276{
6277 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6278 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6279 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6280 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6281 rtx drop_through_label = 0;
6282 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6283 int i;
6284
6285 if (! if_true_label || ! if_false_label)
6286 drop_through_label = gen_label_rtx ();
6287 if (! if_true_label)
6288 if_true_label = drop_through_label;
6289 if (! if_false_label)
6290 if_false_label = drop_through_label;
6291
6292 /* Compare a word at a time, high order first. */
6293 for (i = 0; i < nwords; i++)
6294 {
6295 rtx comp;
6296 rtx op0_word, op1_word;
6297
6298 if (WORDS_BIG_ENDIAN)
6299 {
6300 op0_word = operand_subword_force (op0, i, mode);
6301 op1_word = operand_subword_force (op1, i, mode);
6302 }
6303 else
6304 {
6305 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6306 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6307 }
6308
6309 /* All but high-order word must be compared as unsigned. */
6310 comp = compare_from_rtx (op0_word, op1_word,
6311 (unsignedp || i > 0) ? GTU : GT,
6312			       unsignedp, word_mode, NULL_RTX, 0);
6313 if (comp == const_true_rtx)
6314 emit_jump (if_true_label);
6315 else if (comp != const0_rtx)
6316	do_jump_for_compare (comp, NULL_RTX, if_true_label);
6317
6318 /* Consider lower words only if these are equal. */
6319 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6320			       NULL_RTX, 0);
6321 if (comp == const_true_rtx)
6322 emit_jump (if_false_label);
6323 else if (comp != const0_rtx)
6324	do_jump_for_compare (comp, NULL_RTX, if_false_label);
6325 }
6326
6327 if (if_false_label)
6328 emit_jump (if_false_label);
6329 if (drop_through_label)
6330 emit_label (drop_through_label);
6331}
6332
6333/* Given an EQ_EXPR expression EXP for values too wide to be compared
6334 with one insn, test the comparison and jump to the appropriate label. */
6335
6336static void
6337do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6338 tree exp;
6339 rtx if_false_label, if_true_label;
6340{
6341 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6342 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6343 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6344 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6345 int i;
6346 rtx drop_through_label = 0;
6347
6348 if (! if_false_label)
6349 drop_through_label = if_false_label = gen_label_rtx ();
6350
6351 for (i = 0; i < nwords; i++)
6352 {
6353 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6354 operand_subword_force (op1, i, mode),
6355 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6356 word_mode, NULL_RTX, 0);
6357 if (comp == const_true_rtx)
6358 emit_jump (if_false_label);
6359 else if (comp != const0_rtx)
6360	do_jump_for_compare (comp, if_false_label, NULL_RTX);
6361 }
6362
6363 if (if_true_label)
6364 emit_jump (if_true_label);
6365 if (drop_through_label)
6366 emit_label (drop_through_label);
6367}
6368\f
6369/* Jump according to whether OP0 is 0.
6370 We assume that OP0 has an integer mode that is too wide
6371 for the available compare insns. */
6372
6373static void
6374do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6375 rtx op0;
6376 rtx if_false_label, if_true_label;
6377{
6378 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6379 int i;
6380 rtx drop_through_label = 0;
6381
6382 if (! if_false_label)
6383 drop_through_label = if_false_label = gen_label_rtx ();
6384
6385 for (i = 0; i < nwords; i++)
6386 {
6387 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6388 GET_MODE (op0)),
6389				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6390 if (comp == const_true_rtx)
6391 emit_jump (if_false_label);
6392 else if (comp != const0_rtx)
6393	do_jump_for_compare (comp, if_false_label, NULL_RTX);
6394 }
6395
6396 if (if_true_label)
6397 emit_jump (if_true_label);
6398 if (drop_through_label)
6399 emit_label (drop_through_label);
6400}
6401
6402/* Given a comparison expression in rtl form, output conditional branches to
6403 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6404
6405static void
6406do_jump_for_compare (comparison, if_false_label, if_true_label)
6407 rtx comparison, if_false_label, if_true_label;
6408{
6409 if (if_true_label)
6410 {
6411 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6412 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6413 else
6414 abort ();
6415
6416 if (if_false_label)
6417 emit_jump (if_false_label);
6418 }
6419 else if (if_false_label)
6420 {
6421 rtx insn;
6422 rtx prev = PREV_INSN (get_last_insn ());
6423 rtx branch = 0;
6424
6425      /* Output the branch with the opposite condition.  Then try to invert
6426	 what is generated.  If more than one insn is a branch, or if the
6427	 branch is not the last insn written, abort.  If we can't invert
6428	 the branch, make a true label, redirect this jump to that,
6429	 emit a jump to the false label, and define the true label.  */
6430
6431 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6432 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6433 else
6434 abort ();
6435
6436 /* Here we get the insn before what was just emitted.
6437 On some machines, emitting the branch can discard
6438 the previous compare insn and emit a replacement. */
6439 if (prev == 0)
6440 /* If there's only one preceding insn... */
6441 insn = get_insns ();
6442 else
6443 insn = NEXT_INSN (prev);
6444
6445 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6446 if (GET_CODE (insn) == JUMP_INSN)
6447 {
6448 if (branch)
6449 abort ();
6450 branch = insn;
6451 }
6452
6453 if (branch != get_last_insn ())
6454 abort ();
6455
6456 if (! invert_jump (branch, if_false_label))
6457 {
6458 if_true_label = gen_label_rtx ();
6459 redirect_jump (branch, if_true_label);
6460 emit_jump (if_false_label);
6461 emit_label (if_true_label);
6462 }
6463 }
6464}
6465\f
6466/* Generate code for a comparison expression EXP
6467 (including code to compute the values to be compared)
6468 and set (CC0) according to the result.
6469 SIGNED_CODE should be the rtx operation for this comparison for
6470 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6471
6472 We force a stack adjustment unless there are currently
6473 things pushed on the stack that aren't yet used. */
6474
6475static rtx
6476compare (exp, signed_code, unsigned_code)
6477 register tree exp;
6478 enum rtx_code signed_code, unsigned_code;
6479{
6480 register rtx op0
6481 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6482 register rtx op1
6483 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6484 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6485 register enum machine_mode mode = TYPE_MODE (type);
6486 int unsignedp = TREE_UNSIGNED (type);
6487 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6488
6489 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6490 ((mode == BLKmode)
6491			   ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6492 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6493}
6494
6495/* Like compare but expects the values to compare as two rtx's.
6496 The decision as to signed or unsigned comparison must be made by the caller.
6497
6498 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6499 compared.
6500
6501 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6502 size of MODE should be used. */
6503
6504rtx
6505compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6506 register rtx op0, op1;
6507 enum rtx_code code;
6508 int unsignedp;
6509 enum machine_mode mode;
6510 rtx size;
6511 int align;
6512{
6513 /* If one operand is constant, make it the second one. */
6514
6515 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6516 {
6517 rtx tem = op0;
6518 op0 = op1;
6519 op1 = tem;
6520 code = swap_condition (code);
6521 }
6522
6523 if (flag_force_mem)
6524 {
6525 op0 = force_not_mem (op0);
6526 op1 = force_not_mem (op1);
6527 }
6528
6529 do_pending_stack_adjust ();
6530
6531 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6532 return simplify_relational_operation (code, mode, op0, op1);
6533
6534#if 0
6535  /* There's no need to do this now that combine.c can eliminate lots of
6536     sign extensions.  This can be less efficient in certain cases on other
6537     machines.  */
6538
6539  /* If this is a signed equality comparison, we can do it as an
6540     unsigned comparison since zero-extension is cheaper than sign
6541     extension and comparisons with zero are done as unsigned.  This is
6542     the case even on machines that can do fast sign extension, since
6543     zero-extension is easier to combine with other operations than
6544     sign-extension is.  If we are comparing against a constant, we must
6545     convert it to what it would look like unsigned.  */
6546  if ((code == EQ || code == NE) && ! unsignedp
6547      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6548 {
6549 if (GET_CODE (op1) == CONST_INT
6550 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6551	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6552 unsignedp = 1;
6553 }
6554#endif
6555
6556 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6557
6558 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6559}
6560\f
6561/* Generate code to calculate EXP using a store-flag instruction
6562 and return an rtx for the result. EXP is either a comparison
6563 or a TRUTH_NOT_EXPR whose operand is a comparison.
6564
6565 If TARGET is nonzero, store the result there if convenient.
6566
6567 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6568 cheap.
6569
6570 Return zero if there is no suitable set-flag instruction
6571 available on this machine.
6572
6573 Once expand_expr has been called on the arguments of the comparison,
6574 we are committed to doing the store flag, since it is not safe to
6575 re-evaluate the expression. We emit the store-flag insn by calling
6576 emit_store_flag, but only expand the arguments if we have a reason
6577 to believe that emit_store_flag will be successful. If we think that
6578 it will, but it isn't, we have to simulate the store-flag with a
6579 set/jump/set sequence. */
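/* For example, `flag = (a < b);' may compile to a single scc-style
   instruction; failing that, the fallback emitted below amounts to
   `flag = 1; if (a < b) goto L; flag = 0; L:;'.  */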
6580
6581static rtx
6582do_store_flag (exp, target, mode, only_cheap)
6583 tree exp;
6584 rtx target;
6585 enum machine_mode mode;
6586 int only_cheap;
6587{
6588 enum rtx_code code;
6589  tree arg0, arg1, type;
6590  tree tem;
6591  enum machine_mode operand_mode;
6592  int invert = 0;
6593  int unsignedp;
6594 rtx op0, op1;
6595 enum insn_code icode;
6596 rtx subtarget = target;
6597 rtx result, label, pattern, jump_pat;
6598
6599 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6600 result at the end. We can't simply invert the test since it would
6601 have already been inverted if it were valid. This case occurs for
6602 some floating-point comparisons. */
6603
6604 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6605 invert = 1, exp = TREE_OPERAND (exp, 0);
6606
6607 arg0 = TREE_OPERAND (exp, 0);
6608 arg1 = TREE_OPERAND (exp, 1);
6609 type = TREE_TYPE (arg0);
6610 operand_mode = TYPE_MODE (type);
6611 unsignedp = TREE_UNSIGNED (type);
6612
6613 /* We won't bother with BLKmode store-flag operations because it would mean
6614 passing a lot of information to emit_store_flag. */
6615 if (operand_mode == BLKmode)
6616 return 0;
6617
6618 STRIP_NOPS (arg0);
6619 STRIP_NOPS (arg1);
6620
6621 /* Get the rtx comparison code to use. We know that EXP is a comparison
6622 operation of some type. Some comparisons against 1 and -1 can be
6623 converted to comparisons with zero. Do so here so that the tests
6624 below will be aware that we have a comparison with zero. These
6625 tests will not catch constants in the first operand, but constants
6626 are rarely passed as the first operand. */
6627
6628 switch (TREE_CODE (exp))
6629 {
6630 case EQ_EXPR:
6631 code = EQ;
6632 break;
6633 case NE_EXPR:
6634 code = NE;
6635 break;
6636 case LT_EXPR:
6637 if (integer_onep (arg1))
6638 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6639 else
6640 code = unsignedp ? LTU : LT;
6641 break;
6642 case LE_EXPR:
6643 if (integer_all_onesp (arg1))
6644 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6645 else
6646 code = unsignedp ? LEU : LE;
6647 break;
6648 case GT_EXPR:
6649 if (integer_all_onesp (arg1))
6650 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6651 else
6652 code = unsignedp ? GTU : GT;
6653 break;
6654 case GE_EXPR:
6655 if (integer_onep (arg1))
6656 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6657 else
6658 code = unsignedp ? GEU : GE;
6659 break;
6660 default:
6661 abort ();
6662 }
6663
6664 /* Put a constant second. */
6665 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6666 {
6667 tem = arg0; arg0 = arg1; arg1 = tem;
6668 code = swap_condition (code);
6669 }
6670
6671 /* If this is an equality or inequality test of a single bit, we can
6672 do this by shifting the bit being tested to the low-order bit and
6673 masking the result with the constant 1. If the condition was EQ,
6674 we xor it with 1. This does not require an scc insn and is faster
6675 than an scc insn even if we have it. */
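  /* E.g. `(x & 8) != 0' becomes `(x >> 3) & 1', and `(x & 8) == 0'
     becomes `((x >> 3) & 1) ^ 1'.  */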
6676
6677 if ((code == NE || code == EQ)
6678 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6679 && integer_pow2p (TREE_OPERAND (arg0, 1))
6680      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6681 {
6682 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6683						    NULL_RTX, VOIDmode, 0)));
6684
6685 if (subtarget == 0 || GET_CODE (subtarget) != REG
6686 || GET_MODE (subtarget) != operand_mode
6687 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6688 subtarget = 0;
6689
6690 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6691
6692 if (bitnum != 0)
6693 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6694 size_int (bitnum), target, 1);
6695
6696 if (GET_MODE (op0) != mode)
6697 op0 = convert_to_mode (mode, op0, 1);
6698
6699 if (bitnum != TYPE_PRECISION (type) - 1)
6700 op0 = expand_and (op0, const1_rtx, target);
6701
6702      if ((code == EQ && ! invert) || (code == NE && invert))
6703 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6704 OPTAB_LIB_WIDEN);
6705
6706 return op0;
6707 }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
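
  /* Editorial note (not part of the original code): the special cases
     accepted above are cheap even without an scc insn; on a 32-bit
     machine, for example,

         x < 0    is just the sign bit:   (unsigned) x >> 31
         x >= 0   is its complement:      ((unsigned) x >> 31) ^ 1

     and `x == 0' / `x != 0' can be derived from an abs or ffs insn
     when the target provides one.  */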

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1, operand_mode,
                            unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
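
/* Editorial sketch of the set/compare/jump/set fallback above, in
   C-like pseudocode (not part of the original code):

       target = invert ? 0 : 1;
       if (op0 <code> op1)
         goto label;
       target = invert ? 1 : 0;
     label:

   The result is preloaded with the "true" value and overwritten only
   when the conditional branch falls through; compare_from_rtx may
   also fold the whole comparison to a constant at compile time, which
   the CONST_INT check above handles.  */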
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));
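
  /* Editorial illustration (not part of the original code): with the
     lower bound already subtracted, the single unsigned comparison
     above is equivalent to the familiar C range-check idiom

         if ((unsigned) (i - low) > (unsigned) (high - low))
           goto default_label;

     because an index below `low' wraps around to a huge unsigned
     value and fails the test just as an index above `high' does.  */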

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
              gen_rtx (MULT, Pmode, index,
                       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
              gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
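
  /* Editorial note (not part of the original code): the address formed
     above is `table_label + index * entry_size', so the dispatch acts
     like indexing an array of code addresses, roughly what GNU C's
     computed goto would express as `goto *table[index];'.  */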

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */