/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED      /* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER
           && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
           regno++)
        {
          if (! HARD_REGNO_MODE_OK (regno, mode))
            continue;

          reg = gen_rtx (REG, mode, regno);

          SET_SRC (pat) = mem;
          SET_DEST (pat) = reg;
          if (recog (pat, insn, &num_clobbers) >= 0)
            direct_load[(int) mode] = 1;

          SET_SRC (pat) = reg;
          SET_DEST (pat) = mem;
          if (recog (pat, insn, &num_clobbers) >= 0)
            direct_store[(int) mode] = 1;
        }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
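
/* Illustrative sketch (disabled): the expected life cycle of a queued
   increment.  VAR and INC stand for rtx values obtained elsewhere.  */
#if 0
{
  rtx var, inc, queued, safe;

  /* Queue "VAR = VAR + INC" to be emitted later, at emit_queue time.  */
  queued = enqueue_insn (var,
                         gen_move_insn (var, gen_rtx (PLUS, GET_MODE (var),
                                                      var, inc)));

  /* Before QUEUED can appear in an insn, it must be protected; this
     yields VAR itself if the increment has not been emitted yet, or a
     copy of the old value if it has.  */
  safe = protect_from_queue (queued, 0);

  /* At a safe point, flush all pending increments.  */
  emit_queue ();
}
#endif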
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */            /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
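
/* Illustrative sketch (disabled): a typical call.  Zero-extending a
   QImode pseudo into a fresh SImode pseudo; BYTE_REG stands for an rtx
   obtained elsewhere.  */
#if 0
{
  rtx byte_reg;                         /* QImode value from elsewhere */
  rtx word_reg = gen_reg_rtx (SImode);

  convert_move (word_reg, byte_reg, 1); /* 1: treat BYTE_REG as unsigned */
}
#endif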

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      && direct_load[(int) mode]
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
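
/* Illustrative sketch (disabled): unlike convert_move, convert_to_mode
   returns a value instead of storing into an existing target, which is
   convenient in mid-expansion.  VAL stands for an SImode rtx obtained
   elsewhere.  */
#if 0
{
  rtx val;                              /* SImode value from elsewhere */
  rtx low = convert_to_mode (QImode, val, 0);   /* signed narrowing */
}
#endif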
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2 = size;
              rtx last = get_last_insn ();
              rtx pat;

              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
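
/* Illustrative sketch (disabled): copying a fixed-size aggregate.
   DEST and SRC stand for BLKmode MEM rtx's built elsewhere; a 16-byte,
   word-aligned block is an arbitrary example.  */
#if 0
{
  rtx dest, src;                        /* BLKmode MEMs from elsewhere */

  emit_block_move (dest, src, GEN_INT (16), UNITS_PER_WORD);
}
#endif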
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
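
/* Illustrative sketch (disabled): zeroing a 32-byte aggregate.  TARGET
   stands for a BLKmode MEM built elsewhere; the size is arbitrary.  */
#if 0
{
  rtx target;                           /* BLKmode MEM from elsewhere */

  clear_storage (target, 32);
}
#endif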

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      return last_insn;
    }
  else
    abort ();
}
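
/* Illustrative sketch (disabled): the workhorse scalar copy; loading
   the constant 42 into a fresh SImode pseudo.  */
#if 0
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
}
#endif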
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
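
/* Illustrative sketch (disabled): allocating stack space for a 24-byte
   block argument and copying into it.  ARG stands for a BLKmode MEM
   prepared elsewhere; the size and padding values are arbitrary.  */
#if 0
{
  rtx arg;                              /* BLKmode MEM from elsewhere */
  rtx block_addr = push_block (GEN_INT (24), 0, 0);

  emit_block_move (gen_rtx (MEM, BLKmode, block_addr), arg,
                   GEN_INT (24), UNITS_PER_WORD);
}
#endif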

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size, GEN_INT (align)));
              goto ret;
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
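
/* Illustrative sketch (disabled): pushing one SImode argument for a
   call on a machine with real push insns (ARGS_ADDR of 0).  VAL stands
   for an rtx computed elsewhere; the zeros mean no partial registers
   and no extra padding, and nothing has been pushed so far.  */
#if 0
{
  rtx val;                              /* SImode value from elsewhere */

  emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                  GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx);
}
#endif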
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.

   NO_QUEUE must be true for const calls, because if it isn't, then
   any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
   and will be lost if the libcall sequence is optimized away.

   NO_QUEUE must be false for non-const calls, because if it isn't, the
   call insn will have its CONST_CALL_P bit set, and it will be incorrectly
   optimized.  For instance, the instruction scheduler may incorrectly
   move memory references across the non-const call.  */

void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree) 0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1889 val = convert_to_mode (DFmode, val), mode = DFmode;
1890#endif
1891
1892 /* There's no need to call protect_from_queue, because
1893 either emit_move_insn or emit_push_insn will do that. */
1894
1895 /* Make sure it is a reasonable operand for a move or push insn. */
1896 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1897 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
 1898	val = force_operand (val, NULL_RTX);
1899
1900 argvec[count].value = val;
1901 argvec[count].mode = mode;
1902
1903#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
 1904      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1905 abort ();
1906#endif
1907
 1908      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1909 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
1910 abort ();
1911#ifdef FUNCTION_ARG_PARTIAL_NREGS
1912 argvec[count].partial
 1913	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
1914#else
1915 argvec[count].partial = 0;
1916#endif
1917
 1918      locate_and_pad_parm (mode, NULL_TREE,
 1919			   argvec[count].reg && argvec[count].partial == 0,
 1920			   NULL_TREE, &args_size, &argvec[count].offset,
1921 &argvec[count].size);
1922
1923 if (argvec[count].size.var)
1924 abort ();
1925
1926#ifndef REG_PARM_STACK_SPACE
1927 if (argvec[count].partial)
1928 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
1929#endif
1930
1931 if (argvec[count].reg == 0 || argvec[count].partial != 0
1932#ifdef REG_PARM_STACK_SPACE
1933 || 1
1934#endif
1935 )
1936 args_size.constant += argvec[count].size.constant;
1937
1938#ifdef ACCUMULATE_OUTGOING_ARGS
1939 /* If this arg is actually passed on the stack, it might be
1940 clobbering something we already put there (this library call might
1941 be inside the evaluation of an argument to a function whose call
1942 requires the stack). This will only occur when the library call
1943 has sufficient args to run out of argument registers. Abort in
1944 this case; if this ever occurs, code must be added to save and
1945 restore the arg slot. */
1946
1947 if (argvec[count].reg == 0 || argvec[count].partial != 0)
1948 abort ();
1949#endif
1950
1951 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
1952 }
1953 va_end (p);
1954
1955 /* If this machine requires an external definition for library
1956 functions, write one out. */
1957 assemble_external_libcall (fun);
1958
1959#ifdef STACK_BOUNDARY
1960 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1961 / STACK_BYTES) * STACK_BYTES);
1962#endif
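  /* E.g. with STACK_BYTES == 8, a 13-byte argument block rounds up to
     ((13 + 7) / 8) * 8 == 16 bytes.  */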
1963
1964#ifdef REG_PARM_STACK_SPACE
1965 args_size.constant = MAX (args_size.constant,
1966 REG_PARM_STACK_SPACE ((tree) 0));
1967#endif
1968
1969#ifdef ACCUMULATE_OUTGOING_ARGS
1970 if (args_size.constant > current_function_outgoing_args_size)
1971 current_function_outgoing_args_size = args_size.constant;
1972 args_size.constant = 0;
1973#endif
1974
1975#ifndef PUSH_ROUNDING
 1976  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
1977#endif
1978
1979#ifdef PUSH_ARGS_REVERSED
1980 inc = -1;
1981 argnum = nargs - 1;
1982#else
1983 inc = 1;
1984 argnum = 0;
1985#endif
1986
1987 /* Push the args that need to be pushed. */
1988
1989 for (count = 0; count < nargs; count++, argnum += inc)
1990 {
1991 register enum machine_mode mode = argvec[argnum].mode;
1992 register rtx val = argvec[argnum].value;
1993 rtx reg = argvec[argnum].reg;
1994 int partial = argvec[argnum].partial;
1995
1996 if (! (reg != 0 && partial == 0))
1997 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
1998 argblock, GEN_INT (argvec[count].offset.constant));
1999 NO_DEFER_POP;
2000 }
2001
2002#ifdef PUSH_ARGS_REVERSED
2003 argnum = nargs - 1;
2004#else
2005 argnum = 0;
2006#endif
2007
2008 /* Now load any reg parms into their regs. */
2009
2010 for (count = 0; count < nargs; count++, argnum += inc)
2011 {
2012 register enum machine_mode mode = argvec[argnum].mode;
2013 register rtx val = argvec[argnum].value;
2014 rtx reg = argvec[argnum].reg;
2015 int partial = argvec[argnum].partial;
2016
2017 if (reg != 0 && partial == 0)
2018 emit_move_insn (reg, val);
2019 NO_DEFER_POP;
2020 }
2021
2022 /* For version 1.37, try deleting this entirely. */
2023 if (! no_queue)
2024 emit_queue ();
2025
2026 /* Any regs containing parms remain in use through the call. */
2027 start_sequence ();
2028 for (count = 0; count < nargs; count++)
2029 if (argvec[count].reg != 0)
2030 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2031
2032 use_insns = get_insns ();
2033 end_sequence ();
2034
 2035  fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2036
2037 /* Don't allow popping to be deferred, since then
2038 cse'ing of library calls could delete a call and leave the pop. */
2039 NO_DEFER_POP;
2040
2041 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2042 will set inhibit_defer_pop to that value. */
2043
2044 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2045 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
 2046	       outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2047 old_inhibit_defer_pop + 1, use_insns, no_queue);
2048
2049 /* Now restore inhibit_defer_pop to its actual original value. */
2050 OK_DEFER_POP;
2051}
2052\f
2053/* Expand an assignment that stores the value of FROM into TO.
2054 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2055 (This may contain a QUEUED rtx.)
2056 Otherwise, the returned value is not meaningful.
2057
2058 SUGGEST_REG is no longer actually used.
2059 It used to mean, copy the value through a register
2060 and return that register, if that is possible.
2061 But now we do this if WANT_VALUE.
2062
2063 If the value stored is a constant, we return the constant. */
2064
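/* A hedged usage sketch, not part of the compiled code: TO_DECL and
   FROM_EXPR are hypothetical trees; the caller wants the value of the
   assignment back.  */
#if 0
  rtx value = expand_assignment (to_decl, from_expr, 1, 0);
#endif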
2065rtx
2066expand_assignment (to, from, want_value, suggest_reg)
2067 tree to, from;
2068 int want_value;
2069 int suggest_reg;
2070{
2071 register rtx to_rtx = 0;
2072 rtx result;
2073
2074 /* Don't crash if the lhs of the assignment was erroneous. */
2075
2076 if (TREE_CODE (to) == ERROR_MARK)
 2077    return expand_expr (from, NULL_RTX, VOIDmode, 0);
2078
2079 /* Assignment of a structure component needs special treatment
2080 if the structure component's rtx is not simply a MEM.
2081 Assignment of an array element at a constant index
2082 has the same problem. */
2083
2084 if (TREE_CODE (to) == COMPONENT_REF
2085 || TREE_CODE (to) == BIT_FIELD_REF
2086 || (TREE_CODE (to) == ARRAY_REF
2087 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2088 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2089 {
2090 enum machine_mode mode1;
2091 int bitsize;
2092 int bitpos;
 2093      tree offset;
2094 int unsignedp;
2095 int volatilep = 0;
 2096      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2097 &mode1, &unsignedp, &volatilep);
2098
2099 /* If we are going to use store_bit_field and extract_bit_field,
2100 make sure to_rtx will be safe for multiple use. */
2101
2102 if (mode1 == VOIDmode && want_value)
2103 tem = stabilize_reference (tem);
2104
 2105      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2106 if (offset != 0)
2107 {
 2108	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2109
2110 if (GET_CODE (to_rtx) != MEM)
2111 abort ();
2112 to_rtx = change_address (to_rtx, VOIDmode,
2113 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2114 force_reg (Pmode, offset_rtx)));
2115 }
2116 if (volatilep)
2117 {
2118 if (GET_CODE (to_rtx) == MEM)
2119 MEM_VOLATILE_P (to_rtx) = 1;
2120#if 0 /* This was turned off because, when a field is volatile
2121 in an object which is not volatile, the object may be in a register,
2122 and then we would abort over here. */
2123 else
2124 abort ();
2125#endif
2126 }
2127
2128 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2129 (want_value
2130 /* Spurious cast makes HPUX compiler happy. */
2131 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2132 : VOIDmode),
2133 unsignedp,
2134 /* Required alignment of containing datum. */
2135 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2136 int_size_in_bytes (TREE_TYPE (tem)));
2137 preserve_temp_slots (result);
2138 free_temp_slots ();
2139
2140 return result;
2141 }
2142
2143 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2144 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2145
2146 if (to_rtx == 0)
 2147    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2148
2149 /* In case we are returning the contents of an object which overlaps
2150 the place the value is being stored, use a safe function when copying
2151 a value through a pointer into a structure value return block. */
2152 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2153 && current_function_returns_struct
2154 && !current_function_returns_pcc_struct)
2155 {
 2156      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2157 rtx size = expr_size (from);
2158
2159#ifdef TARGET_MEM_FUNCTIONS
 2160      emit_library_call (memcpy_libfunc, 0,
2161 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2162 XEXP (from_rtx, 0), Pmode,
2163 size, Pmode);
2164#else
 2165      emit_library_call (bcopy_libfunc, 0,
2166 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2167 XEXP (to_rtx, 0), Pmode,
2168 size, Pmode);
2169#endif
2170
2171 preserve_temp_slots (to_rtx);
2172 free_temp_slots ();
2173 return to_rtx;
2174 }
2175
2176 /* Compute FROM and store the value in the rtx we got. */
2177
2178 result = store_expr (from, to_rtx, want_value);
2179 preserve_temp_slots (result);
2180 free_temp_slots ();
2181 return result;
2182}
2183
2184/* Generate code for computing expression EXP,
2185 and storing the value into TARGET.
2186 Returns TARGET or an equivalent value.
2187 TARGET may contain a QUEUED rtx.
2188
2189 If SUGGEST_REG is nonzero, copy the value through a register
2190 and return that register, if that is possible.
2191
2192 If the value stored is a constant, we return the constant. */
2193
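/* A hedged usage sketch, not part of the compiled code: expand EXP into
   a fresh pseudo and keep whatever comes back, which may be a constant
   rather than the pseudo itself.  */
#if 0
  rtx temp = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
  rtx value = store_expr (exp, temp, 0);
#endif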
2194rtx
2195store_expr (exp, target, suggest_reg)
2196 register tree exp;
2197 register rtx target;
2198 int suggest_reg;
2199{
2200 register rtx temp;
2201 int dont_return_target = 0;
2202
2203 if (TREE_CODE (exp) == COMPOUND_EXPR)
2204 {
2205 /* Perform first part of compound expression, then assign from second
2206 part. */
2207 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2208 emit_queue ();
2209 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2210 }
2211 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2212 {
2213 /* For conditional expression, get safe form of the target. Then
2214 test the condition, doing the appropriate assignment on either
2215 side. This avoids the creation of unnecessary temporaries.
2216 For non-BLKmode, it is more efficient not to do this. */
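      /* E.g. for a structure assignment `s = cond ? s1 : s2', each arm
	 stores directly into S instead of going through a BLKmode
	 temporary.  */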
2217
2218 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2219
2220 emit_queue ();
2221 target = protect_from_queue (target, 1);
2222
2223 NO_DEFER_POP;
2224 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2225 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2226 emit_queue ();
2227 emit_jump_insn (gen_jump (lab2));
2228 emit_barrier ();
2229 emit_label (lab1);
2230 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2231 emit_queue ();
2232 emit_label (lab2);
2233 OK_DEFER_POP;
2234 return target;
2235 }
2236 else if (suggest_reg && GET_CODE (target) == MEM
2237 && GET_MODE (target) != BLKmode)
2238 /* If target is in memory and caller wants value in a register instead,
2239 arrange that. Pass TARGET as target for expand_expr so that,
2240 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2241 We know expand_expr will not use the target in that case. */
2242 {
 2243      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2244 GET_MODE (target), 0);
2245 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2246 temp = copy_to_reg (temp);
2247 dont_return_target = 1;
2248 }
2249 else if (queued_subexp_p (target))
2250 /* If target contains a postincrement, it is not safe
2251 to use as the returned value. It would access the wrong
2252 place by the time the queued increment gets output.
2253 So copy the value through a temporary and use that temp
2254 as the result. */
2255 {
2256 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2257 {
2258 /* Expand EXP into a new pseudo. */
2259 temp = gen_reg_rtx (GET_MODE (target));
2260 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2261 }
2262 else
 2263	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2264 dont_return_target = 1;
2265 }
2266 else
2267 {
2268 temp = expand_expr (exp, target, GET_MODE (target), 0);
2269 /* DO return TARGET if it's a specified hardware register.
2270 expand_return relies on this. */
2271 if (!(target && GET_CODE (target) == REG
2272 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2273 && CONSTANT_P (temp))
2274 dont_return_target = 1;
2275 }
2276
2277 /* If value was not generated in the target, store it there.
 2278     Convert the value to TARGET's type first if necessary.  */
2279
2280 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2281 {
2282 target = protect_from_queue (target, 1);
2283 if (GET_MODE (temp) != GET_MODE (target)
2284 && GET_MODE (temp) != VOIDmode)
2285 {
2286 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2287 if (dont_return_target)
2288 {
2289 /* In this case, we will return TEMP,
2290 so make sure it has the proper mode.
2291 But don't forget to store the value into TARGET. */
2292 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2293 emit_move_insn (target, temp);
2294 }
2295 else
2296 convert_move (target, temp, unsignedp);
2297 }
2298
2299 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2300 {
2301 /* Handle copying a string constant into an array.
2302 The string constant may be shorter than the array.
2303 So copy just the string's actual length, and clear the rest. */
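	  /* E.g. for `char a[10] = "abc"', the string supplies 4 bytes
	     (including the terminating null) and the remaining 6 bytes
	     of the array are cleared below.  */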
2304 rtx size;
2305
2306 /* Get the size of the data type of the string,
2307 which is actually the size of the target. */
2308 size = expr_size (exp);
2309 if (GET_CODE (size) == CONST_INT
2310 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2311 emit_block_move (target, temp, size,
2312 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2313 else
 2314	    {
2315 /* Compute the size of the data to copy from the string. */
2316 tree copy_size
2317 = fold (build (MIN_EXPR, sizetype,
2318 size_binop (CEIL_DIV_EXPR,
2319 TYPE_SIZE (TREE_TYPE (exp)),
2320 size_int (BITS_PER_UNIT)),
2321 convert (sizetype,
2322 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2323 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2324 VOIDmode, 0);
2325 rtx label = 0;
2326
2327 /* Copy that much. */
2328 emit_block_move (target, temp, copy_size_rtx,
2329 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2330
2331 /* Figure out how much is left in TARGET
2332 that we have to clear. */
2333 if (GET_CODE (copy_size_rtx) == CONST_INT)
2334 {
2335 temp = plus_constant (XEXP (target, 0),
2336 TREE_STRING_LENGTH (exp));
2337 size = plus_constant (size,
2338 - TREE_STRING_LENGTH (exp));
2339 }
2340 else
2341 {
2342 enum machine_mode size_mode = Pmode;
2343
2344 temp = force_reg (Pmode, XEXP (target, 0));
2345 temp = expand_binop (size_mode, add_optab, temp,
2346 copy_size_rtx, NULL_RTX, 0,
2347 OPTAB_LIB_WIDEN);
2348
2349 size = expand_binop (size_mode, sub_optab, size,
2350 copy_size_rtx, NULL_RTX, 0,
2351 OPTAB_LIB_WIDEN);
 2352
 2353		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2354 GET_MODE (size), 0, 0);
2355 label = gen_label_rtx ();
2356 emit_jump_insn (gen_blt (label));
2357 }
2358
2359 if (size != const0_rtx)
2360 {
 2361#ifdef TARGET_MEM_FUNCTIONS
 2362		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
 2363				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
 2364#else
 2365		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
 2366				     temp, Pmode, size, Pmode);
 2367#endif
2368 }
2369 if (label)
2370 emit_label (label);
2371 }
2372 }
2373 else if (GET_MODE (temp) == BLKmode)
2374 emit_block_move (target, temp, expr_size (exp),
2375 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2376 else
2377 emit_move_insn (target, temp);
2378 }
2379 if (dont_return_target)
2380 return temp;
2381 return target;
2382}
2383\f
2384/* Store the value of constructor EXP into the rtx TARGET.
2385 TARGET is either a REG or a MEM. */
2386
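/* A hedged usage sketch, not part of the compiled code, mirroring the
   CONSTRUCTOR case of expand_expr below: pick a target that EXP cannot
   read, then store it element by element.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp))
    target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
  store_constructor (exp, target);
#endif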
2387static void
2388store_constructor (exp, target)
2389 tree exp;
2390 rtx target;
2391{
2392 tree type = TREE_TYPE (exp);
2393
2394 /* We know our target cannot conflict, since safe_from_p has been called. */
2395#if 0
2396 /* Don't try copying piece by piece into a hard register
2397 since that is vulnerable to being clobbered by EXP.
2398 Instead, construct in a pseudo register and then copy it all. */
2399 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2400 {
2401 rtx temp = gen_reg_rtx (GET_MODE (target));
2402 store_constructor (exp, temp);
2403 emit_move_insn (target, temp);
2404 return;
2405 }
2406#endif
2407
 2408  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2409 {
2410 register tree elt;
2411
2412 /* Inform later passes that the whole union value is dead. */
2413 if (TREE_CODE (type) == UNION_TYPE)
 2414	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2415
2416 /* If we are building a static constructor into a register,
2417 set the initial value as zero so we can fold the value into
2418 a constant. */
2419 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2420 emit_move_insn (target, const0_rtx);
2421
2422 /* If the constructor has fewer fields than the structure,
2423 clear the whole structure first. */
2424 else if (list_length (CONSTRUCTOR_ELTS (exp))
2425 != list_length (TYPE_FIELDS (type)))
2426 clear_storage (target, int_size_in_bytes (type));
2427 else
2428 /* Inform later passes that the old value is dead. */
2429 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2430
2431 /* Store each element of the constructor into
2432 the corresponding field of TARGET. */
2433
2434 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2435 {
2436 register tree field = TREE_PURPOSE (elt);
2437 register enum machine_mode mode;
2438 int bitsize;
2439 int bitpos;
2440 int unsignedp;
2441
2442 /* Just ignore missing fields.
2443 We cleared the whole structure, above,
2444 if any fields are missing. */
2445 if (field == 0)
2446 continue;
2447
2448 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2449 unsignedp = TREE_UNSIGNED (field);
2450 mode = DECL_MODE (field);
2451 if (DECL_BIT_FIELD (field))
2452 mode = VOIDmode;
2453
2454 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2455 /* ??? This case remains to be written. */
2456 abort ();
2457
2458 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2459
2460 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2461 /* The alignment of TARGET is
2462 at least what its type requires. */
2463 VOIDmode, 0,
2464 TYPE_ALIGN (type) / BITS_PER_UNIT,
2465 int_size_in_bytes (type));
2466 }
2467 }
 2468  else if (TREE_CODE (type) == ARRAY_TYPE)
2469 {
2470 register tree elt;
2471 register int i;
 2472      tree domain = TYPE_DOMAIN (type);
2473 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2474 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
 2475      tree elttype = TREE_TYPE (type);
2476
2477 /* If the constructor has fewer fields than the structure,
 2478	 clear the whole structure first.  Similarly if this is a
 2479	 static constructor of a non-BLKmode object.  */
 2480
 2481      if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
 2482	  || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2483 clear_storage (target, maxelt - minelt + 1);
2484 else
2485 /* Inform later passes that the old value is dead. */
2486 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2487
2488 /* Store each element of the constructor into
2489 the corresponding element of TARGET, determined
2490 by counting the elements. */
2491 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2492 elt;
2493 elt = TREE_CHAIN (elt), i++)
2494 {
2495 register enum machine_mode mode;
2496 int bitsize;
2497 int bitpos;
2498 int unsignedp;
2499
2500 mode = TYPE_MODE (elttype);
2501 bitsize = GET_MODE_BITSIZE (mode);
2502 unsignedp = TREE_UNSIGNED (elttype);
2503
2504 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2505
2506 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2507 /* The alignment of TARGET is
2508 at least what its type requires. */
2509 VOIDmode, 0,
2510 TYPE_ALIGN (type) / BITS_PER_UNIT,
2511 int_size_in_bytes (type));
2512 }
2513 }
2514
2515 else
2516 abort ();
2517}
2518
2519/* Store the value of EXP (an expression tree)
2520 into a subfield of TARGET which has mode MODE and occupies
2521 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2522 If MODE is VOIDmode, it means that we are storing into a bit-field.
2523
2524 If VALUE_MODE is VOIDmode, return nothing in particular.
2525 UNSIGNEDP is not used in this case.
2526
2527 Otherwise, return an rtx for the value stored. This rtx
2528 has mode VALUE_MODE if that is convenient to do.
2529 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2530
2531 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2532 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2533
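/* A hedged usage sketch, not part of the compiled code: store EXP into
   a 3-bit field at bit offset 8 of TARGET inside a word-sized structure.
   VOIDmode as MODE selects the bit-field path, and word_mode as
   VALUE_MODE asks for the stored value back.  */
#if 0
  rtx stored = store_field (target, 3, 8, VOIDmode, exp,
			    word_mode, 1, UNITS_PER_WORD, UNITS_PER_WORD);
#endif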
2534static rtx
2535store_field (target, bitsize, bitpos, mode, exp, value_mode,
2536 unsignedp, align, total_size)
2537 rtx target;
2538 int bitsize, bitpos;
2539 enum machine_mode mode;
2540 tree exp;
2541 enum machine_mode value_mode;
2542 int unsignedp;
2543 int align;
2544 int total_size;
2545{
 2546  HOST_WIDE_INT width_mask = 0;
 2547
2548 if (bitsize < HOST_BITS_PER_WIDE_INT)
2549 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2550
2551 /* If we are storing into an unaligned field of an aligned union that is
2552 in a register, we may have the mode of TARGET being an integer mode but
2553 MODE == BLKmode. In that case, get an aligned object whose size and
2554 alignment are the same as TARGET and store TARGET into it (we can avoid
2555 the store if the field being stored is the entire width of TARGET). Then
2556 call ourselves recursively to store the field into a BLKmode version of
2557 that object. Finally, load from the object into TARGET. This is not
2558 very efficient in general, but should only be slightly more expensive
2559 than the otherwise-required unaligned accesses. Perhaps this can be
2560 cleaned up later. */
2561
2562 if (mode == BLKmode
2563 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2564 {
2565 rtx object = assign_stack_temp (GET_MODE (target),
2566 GET_MODE_SIZE (GET_MODE (target)), 0);
2567 rtx blk_object = copy_rtx (object);
2568
2569 PUT_MODE (blk_object, BLKmode);
2570
2571 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2572 emit_move_insn (object, target);
2573
2574 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2575 align, total_size);
2576
2577 emit_move_insn (target, object);
2578
2579 return target;
2580 }
2581
2582 /* If the structure is in a register or if the component
2583 is a bit field, we cannot use addressing to access it.
2584 Use bit-field techniques or SUBREG to store in it. */
2585
2586 if (mode == VOIDmode
2587 || (mode != BLKmode && ! direct_store[(int) mode])
2588 || GET_CODE (target) == REG
2589 || GET_CODE (target) == SUBREG)
2590 {
 2591      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2592 /* Store the value in the bitfield. */
2593 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2594 if (value_mode != VOIDmode)
2595 {
2596 /* The caller wants an rtx for the value. */
2597 /* If possible, avoid refetching from the bitfield itself. */
2598 if (width_mask != 0
2599 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
 2600	    return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
 2601	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
2602 NULL_RTX, value_mode, 0, align,
2603 total_size);
2604 }
2605 return const0_rtx;
2606 }
2607 else
2608 {
2609 rtx addr = XEXP (target, 0);
2610 rtx to_rtx;
2611
2612 /* If a value is wanted, it must be the lhs;
2613 so make the address stable for multiple use. */
2614
2615 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2616 && ! CONSTANT_ADDRESS_P (addr)
2617 /* A frame-pointer reference is already stable. */
2618 && ! (GET_CODE (addr) == PLUS
2619 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2620 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2621 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2622 addr = copy_to_reg (addr);
2623
2624 /* Now build a reference to just the desired component. */
2625
2626 to_rtx = change_address (target, mode,
2627 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2628 MEM_IN_STRUCT_P (to_rtx) = 1;
2629
2630 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2631 }
2632}
2633\f
2634/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2635 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2636 ARRAY_REFs at constant positions and find the ultimate containing object,
2637 which we return.
2638
2639 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2640 bit position, and *PUNSIGNEDP to the signedness of the field.
2641 If the position of the field is variable, we store a tree
2642 giving the variable offset (in units) in *POFFSET.
2643 This offset is in addition to the bit position.
2644 If the position is not variable, we store 0 in *POFFSET.
2645
2646 If any of the extraction expressions is volatile,
2647 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2648
2649 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2650 is a mode that can be used to access the field. In that case, *PBITSIZE
2651 is redundant.
2652
2653 If the field describes a variable-sized object, *PMODE is set to
2654 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2655 this case, but the address of the object can be found. */
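/* A hedged worked example, not part of the compiled code: for `s.f',
   where F is a 3-bit field starting at bit 37 of S, this returns the
   tree for S and sets *PBITSIZE = 3, *PBITPOS = 37, *POFFSET = 0 and
   (since 37 is not a multiple of 3) leaves *PMODE == VOIDmode.  */
#if 0
  tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep);
#endif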
2656
2657tree
 2658get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2659 tree exp;
2660 int *pbitsize;
2661 int *pbitpos;
 2662     tree *poffset;
2663 enum machine_mode *pmode;
2664 int *punsignedp;
2665 int *pvolatilep;
2666{
2667 tree size_tree = 0;
2668 enum machine_mode mode = VOIDmode;
7bb0943f 2669 tree offset = 0;
bbf6f052
RK
2670
2671 if (TREE_CODE (exp) == COMPONENT_REF)
2672 {
2673 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2674 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2675 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2676 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2677 }
2678 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2679 {
2680 size_tree = TREE_OPERAND (exp, 1);
2681 *punsignedp = TREE_UNSIGNED (exp);
2682 }
2683 else
2684 {
2685 mode = TYPE_MODE (TREE_TYPE (exp));
2686 *pbitsize = GET_MODE_BITSIZE (mode);
2687 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2688 }
2689
2690 if (size_tree)
2691 {
2692 if (TREE_CODE (size_tree) != INTEGER_CST)
2693 mode = BLKmode, *pbitsize = -1;
2694 else
2695 *pbitsize = TREE_INT_CST_LOW (size_tree);
2696 }
2697
2698 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2699 and find the ultimate containing object. */
2700
2701 *pbitpos = 0;
2702
2703 while (1)
2704 {
 2705      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
 2706	{
2707 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2708 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2709 : TREE_OPERAND (exp, 2));
 2710
2711 if (TREE_CODE (pos) == PLUS_EXPR)
2712 {
2713 tree constant, var;
2714 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2715 {
2716 constant = TREE_OPERAND (pos, 0);
2717 var = TREE_OPERAND (pos, 1);
2718 }
2719 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2720 {
2721 constant = TREE_OPERAND (pos, 1);
2722 var = TREE_OPERAND (pos, 0);
2723 }
2724 else
2725 abort ();
2726 *pbitpos += TREE_INT_CST_LOW (constant);
2727 if (offset)
2728 offset = size_binop (PLUS_EXPR, offset,
2729 size_binop (FLOOR_DIV_EXPR, var,
2730 size_int (BITS_PER_UNIT)));
2731 else
2732 offset = size_binop (FLOOR_DIV_EXPR, var,
2733 size_int (BITS_PER_UNIT));
2734 }
2735 else if (TREE_CODE (pos) == INTEGER_CST)
2736 *pbitpos += TREE_INT_CST_LOW (pos);
2737 else
2738 {
2739 /* Assume here that the offset is a multiple of a unit.
2740 If not, there should be an explicitly added constant. */
2741 if (offset)
2742 offset = size_binop (PLUS_EXPR, offset,
2743 size_binop (FLOOR_DIV_EXPR, pos,
2744 size_int (BITS_PER_UNIT)));
2745 else
2746 offset = size_binop (FLOOR_DIV_EXPR, pos,
2747 size_int (BITS_PER_UNIT));
2748 }
 2749	}
 2750
2751 else if (TREE_CODE (exp) == ARRAY_REF
2752 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2753 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2754 {
2755 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2756 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2757 }
2758 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2759 && ! ((TREE_CODE (exp) == NOP_EXPR
2760 || TREE_CODE (exp) == CONVERT_EXPR)
2761 && (TYPE_MODE (TREE_TYPE (exp))
2762 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2763 break;
2764
2765 /* If any reference in the chain is volatile, the effect is volatile. */
2766 if (TREE_THIS_VOLATILE (exp))
2767 *pvolatilep = 1;
2768 exp = TREE_OPERAND (exp, 0);
2769 }
2770
2771 /* If this was a bit-field, see if there is a mode that allows direct
2772 access in case EXP is in memory. */
2773 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2774 {
2775 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2776 if (mode == BLKmode)
2777 mode = VOIDmode;
2778 }
2779
2780 *pmode = mode;
2781 *poffset = offset;
2782#if 0
2783 /* We aren't finished fixing the callers to really handle nonzero offset. */
2784 if (offset != 0)
2785 abort ();
2786#endif
2787
2788 return exp;
2789}
2790\f
2791/* Given an rtx VALUE that may contain additions and multiplications,
2792 return an equivalent value that just refers to a register or memory.
2793 This is done by generating instructions to perform the arithmetic
2794 and returning a pseudo-register containing the value. */
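/* A hedged usage sketch, not part of the compiled code: reduce a
   symbolic sum such as (plus (mult (reg) (const_int 4)) (const_int 8)),
   as produced under EXPAND_SUM, to a single register or memory rtx.  */
#if 0
  rtx operand = force_operand (value, NULL_RTX);
#endif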
2795
2796rtx
2797force_operand (value, target)
2798 rtx value, target;
2799{
2800 register optab binoptab = 0;
2801 /* Use a temporary to force order of execution of calls to
2802 `force_operand'. */
2803 rtx tmp;
2804 register rtx op2;
2805 /* Use subtarget as the target for operand 0 of a binary operation. */
2806 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2807
2808 if (GET_CODE (value) == PLUS)
2809 binoptab = add_optab;
2810 else if (GET_CODE (value) == MINUS)
2811 binoptab = sub_optab;
2812 else if (GET_CODE (value) == MULT)
2813 {
2814 op2 = XEXP (value, 1);
2815 if (!CONSTANT_P (op2)
2816 && !(GET_CODE (op2) == REG && op2 != subtarget))
2817 subtarget = 0;
2818 tmp = force_operand (XEXP (value, 0), subtarget);
2819 return expand_mult (GET_MODE (value), tmp,
 2820			force_operand (op2, NULL_RTX),
2821 target, 0);
2822 }
2823
2824 if (binoptab)
2825 {
2826 op2 = XEXP (value, 1);
2827 if (!CONSTANT_P (op2)
2828 && !(GET_CODE (op2) == REG && op2 != subtarget))
2829 subtarget = 0;
2830 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2831 {
2832 binoptab = add_optab;
2833 op2 = negate_rtx (GET_MODE (value), op2);
2834 }
2835
2836 /* Check for an addition with OP2 a constant integer and our first
2837 operand a PLUS of a virtual register and something else. In that
2838 case, we want to emit the sum of the virtual register and the
2839 constant first and then add the other value. This allows virtual
2840 register instantiation to simply modify the constant rather than
2841 creating another one around this addition. */
2842 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2843 && GET_CODE (XEXP (value, 0)) == PLUS
2844 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2845 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2846 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2847 {
2848 rtx temp = expand_binop (GET_MODE (value), binoptab,
2849 XEXP (XEXP (value, 0), 0), op2,
2850 subtarget, 0, OPTAB_LIB_WIDEN);
2851 return expand_binop (GET_MODE (value), binoptab, temp,
2852 force_operand (XEXP (XEXP (value, 0), 1), 0),
2853 target, 0, OPTAB_LIB_WIDEN);
2854 }
2855
2856 tmp = force_operand (XEXP (value, 0), subtarget);
2857 return expand_binop (GET_MODE (value), binoptab, tmp,
 2858			 force_operand (op2, NULL_RTX),
2859 target, 0, OPTAB_LIB_WIDEN);
 2860  /* We give UNSIGNEDP = 0 to expand_binop
 2861     because the only operations we are expanding here are signed ones.  */
2862 }
2863 return value;
2864}
2865\f
2866/* Subroutine of expand_expr:
2867 save the non-copied parts (LIST) of an expr (LHS), and return a list
2868 which can restore these values to their previous values,
2869 should something modify their storage. */
2870
2871static tree
2872save_noncopied_parts (lhs, list)
2873 tree lhs;
2874 tree list;
2875{
2876 tree tail;
2877 tree parts = 0;
2878
2879 for (tail = list; tail; tail = TREE_CHAIN (tail))
2880 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2881 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2882 else
2883 {
2884 tree part = TREE_VALUE (tail);
2885 tree part_type = TREE_TYPE (part);
 2886	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2887 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2888 int_size_in_bytes (part_type), 0);
2889 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
 2890	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
 2891	parts = tree_cons (to_be_saved,
2892 build (RTL_EXPR, part_type, NULL_TREE,
2893 (tree) target),
2894 parts);
2895 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2896 }
2897 return parts;
2898}
2899
2900/* Subroutine of expand_expr:
2901 record the non-copied parts (LIST) of an expr (LHS), and return a list
2902 which specifies the initial values of these parts. */
2903
2904static tree
2905init_noncopied_parts (lhs, list)
2906 tree lhs;
2907 tree list;
2908{
2909 tree tail;
2910 tree parts = 0;
2911
2912 for (tail = list; tail; tail = TREE_CHAIN (tail))
2913 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2914 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2915 else
2916 {
2917 tree part = TREE_VALUE (tail);
2918 tree part_type = TREE_TYPE (part);
 2919	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
2920 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2921 }
2922 return parts;
2923}
2924
2925/* Subroutine of expand_expr: return nonzero iff there is no way that
2926 EXP can reference X, which is being modified. */
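/* A hedged example, not part of the compiled code: before storing into
   TARGET while evaluating EXP piecewise, callers check that EXP cannot
   observe the partially-written TARGET; a constant EXP is always safe,
   while an INDIRECT_REF is unsafe whenever TARGET is in memory.  */
#if 0
  if (! safe_from_p (target, exp))
    target = gen_reg_rtx (mode);	/* fall back to a fresh pseudo */
#endif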
2927
2928static int
2929safe_from_p (x, exp)
2930 rtx x;
2931 tree exp;
2932{
2933 rtx exp_rtl = 0;
2934 int i, nops;
2935
2936 if (x == 0)
2937 return 1;
2938
2939 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
2940 find the underlying pseudo. */
2941 if (GET_CODE (x) == SUBREG)
2942 {
2943 x = SUBREG_REG (x);
2944 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2945 return 0;
2946 }
2947
2948 /* If X is a location in the outgoing argument area, it is always safe. */
2949 if (GET_CODE (x) == MEM
2950 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2951 || (GET_CODE (XEXP (x, 0)) == PLUS
2952 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2953 return 1;
2954
2955 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2956 {
2957 case 'd':
2958 exp_rtl = DECL_RTL (exp);
2959 break;
2960
2961 case 'c':
2962 return 1;
2963
2964 case 'x':
2965 if (TREE_CODE (exp) == TREE_LIST)
2966 return ((TREE_VALUE (exp) == 0
2967 || safe_from_p (x, TREE_VALUE (exp)))
2968 && (TREE_CHAIN (exp) == 0
2969 || safe_from_p (x, TREE_CHAIN (exp))));
2970 else
2971 return 0;
2972
2973 case '1':
2974 return safe_from_p (x, TREE_OPERAND (exp, 0));
2975
2976 case '2':
2977 case '<':
2978 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2979 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2980
2981 case 'e':
2982 case 'r':
2983 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2984 the expression. If it is set, we conflict iff we are that rtx or
2985 both are in memory. Otherwise, we check all operands of the
2986 expression recursively. */
2987
2988 switch (TREE_CODE (exp))
2989 {
2990 case ADDR_EXPR:
2991 return staticp (TREE_OPERAND (exp, 0));
2992
2993 case INDIRECT_REF:
2994 if (GET_CODE (x) == MEM)
2995 return 0;
2996 break;
2997
2998 case CALL_EXPR:
2999 exp_rtl = CALL_EXPR_RTL (exp);
3000 if (exp_rtl == 0)
3001 {
3002 /* Assume that the call will clobber all hard registers and
3003 all of memory. */
3004 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3005 || GET_CODE (x) == MEM)
3006 return 0;
3007 }
3008
3009 break;
3010
3011 case RTL_EXPR:
3012 exp_rtl = RTL_EXPR_RTL (exp);
3013 if (exp_rtl == 0)
3014 /* We don't know what this can modify. */
3015 return 0;
3016
3017 break;
3018
3019 case WITH_CLEANUP_EXPR:
3020 exp_rtl = RTL_EXPR_RTL (exp);
3021 break;
3022
3023 case SAVE_EXPR:
3024 exp_rtl = SAVE_EXPR_RTL (exp);
3025 break;
3026
3027 case BIND_EXPR:
3028 /* The only operand we look at is operand 1. The rest aren't
3029 part of the expression. */
3030 return safe_from_p (x, TREE_OPERAND (exp, 1));
3031
3032 case METHOD_CALL_EXPR:
3033 /* This takes a rtx argument, but shouldn't appear here. */
3034 abort ();
3035 }
3036
3037 /* If we have an rtx, we do not need to scan our operands. */
3038 if (exp_rtl)
3039 break;
3040
3041 nops = tree_code_length[(int) TREE_CODE (exp)];
3042 for (i = 0; i < nops; i++)
3043 if (TREE_OPERAND (exp, i) != 0
3044 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3045 return 0;
3046 }
3047
3048 /* If we have an rtl, find any enclosed object. Then see if we conflict
3049 with it. */
3050 if (exp_rtl)
3051 {
3052 if (GET_CODE (exp_rtl) == SUBREG)
3053 {
3054 exp_rtl = SUBREG_REG (exp_rtl);
3055 if (GET_CODE (exp_rtl) == REG
3056 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3057 return 0;
3058 }
3059
3060 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3061 are memory and EXP is not readonly. */
3062 return ! (rtx_equal_p (x, exp_rtl)
3063 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3064 && ! TREE_READONLY (exp)));
3065 }
3066
3067 /* If we reach here, it is safe. */
3068 return 1;
3069}
3070
3071/* Subroutine of expand_expr: return nonzero iff EXP is an
3072 expression whose type is statically determinable. */
3073
3074static int
3075fixed_type_p (exp)
3076 tree exp;
3077{
3078 if (TREE_CODE (exp) == PARM_DECL
3079 || TREE_CODE (exp) == VAR_DECL
3080 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3081 || TREE_CODE (exp) == COMPONENT_REF
3082 || TREE_CODE (exp) == ARRAY_REF)
3083 return 1;
3084 return 0;
3085}
3086\f
3087/* expand_expr: generate code for computing expression EXP.
3088 An rtx for the computed value is returned. The value is never null.
3089 In the case of a void EXP, const0_rtx is returned.
3090
3091 The value may be stored in TARGET if TARGET is nonzero.
3092 TARGET is just a suggestion; callers must assume that
3093 the rtx returned may not be the same as TARGET.
3094
3095 If TARGET is CONST0_RTX, it means that the value will be ignored.
3096
3097 If TMODE is not VOIDmode, it suggests generating the
3098 result in mode TMODE. But this is done only when convenient.
 3099   Otherwise, TMODE is ignored and the value is generated in its natural mode.
3100 TMODE is just a suggestion; callers must assume that
3101 the rtx returned may not have mode TMODE.
3102
3103 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3104 with a constant address even if that address is not normally legitimate.
3105 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3106
3107 If MODIFIER is EXPAND_SUM then when EXP is an addition
3108 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3109 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3110 products as above, or REG or MEM, or constant.
3111 Ordinarily in such cases we would output mul or add instructions
3112 and then return a pseudo reg containing the sum.
3113
3114 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3115 it also marks a label as absolutely required (it can't be dead).
 3116   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
 3117   This is used for outputting expressions used in initializers.  */
3118
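/* A hedged sketch, not part of the compiled code, with ADDR_TREE a
   hypothetical address expression: under EXPAND_SUM the result may come
   back as a symbolic (plus (reg) (const_int 8)) to be legitimized by
   memory_address, instead of being forced through add insns here.  */
#if 0
  rtx sum = expand_expr (addr_tree, NULL_RTX, VOIDmode, EXPAND_SUM);
  rtx mem = gen_rtx (MEM, mode, memory_address (mode, sum));
#endif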
3119rtx
3120expand_expr (exp, target, tmode, modifier)
3121 register tree exp;
3122 rtx target;
3123 enum machine_mode tmode;
3124 enum expand_modifier modifier;
3125{
3126 register rtx op0, op1, temp;
3127 tree type = TREE_TYPE (exp);
3128 int unsignedp = TREE_UNSIGNED (type);
3129 register enum machine_mode mode = TYPE_MODE (type);
3130 register enum tree_code code = TREE_CODE (exp);
3131 optab this_optab;
3132 /* Use subtarget as the target for operand 0 of a binary operation. */
3133 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3134 rtx original_target = target;
3135 int ignore = target == const0_rtx;
3136 tree context;
3137
3138 /* Don't use hard regs as subtargets, because the combiner
3139 can only handle pseudo regs. */
3140 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3141 subtarget = 0;
3142 /* Avoid subtargets inside loops,
3143 since they hide some invariant expressions. */
3144 if (preserve_subexpressions_p ())
3145 subtarget = 0;
3146
3147 if (ignore) target = 0, original_target = 0;
3148
3149 /* If will do cse, generate all results into pseudo registers
3150 since 1) that allows cse to find more things
3151 and 2) otherwise cse could produce an insn the machine
3152 cannot support. */
3153
3154 if (! cse_not_expected && mode != BLKmode && target
3155 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3156 target = subtarget;
3157
3158 /* Ensure we reference a volatile object even if value is ignored. */
3159 if (ignore && TREE_THIS_VOLATILE (exp)
3160 && mode != VOIDmode && mode != BLKmode)
3161 {
3162 target = gen_reg_rtx (mode);
3163 temp = expand_expr (exp, target, VOIDmode, modifier);
3164 if (temp != target)
3165 emit_move_insn (target, temp);
3166 return target;
3167 }
3168
3169 switch (code)
3170 {
3171 case LABEL_DECL:
3172 {
3173 tree function = decl_function_context (exp);
3174 /* Handle using a label in a containing function. */
3175 if (function != current_function_decl && function != 0)
3176 {
3177 struct function *p = find_function_data (function);
3178 /* Allocate in the memory associated with the function
3179 that the label is in. */
3180 push_obstacks (p->function_obstack,
3181 p->function_maybepermanent_obstack);
3182
3183 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3184 label_rtx (exp), p->forced_labels);
3185 pop_obstacks ();
3186 }
3187 else if (modifier == EXPAND_INITIALIZER)
3188 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3189 label_rtx (exp), forced_labels);
 3190	temp = gen_rtx (MEM, FUNCTION_MODE,
 3191			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3192 if (function != current_function_decl && function != 0)
3193 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3194 return temp;
 3195      }
3196
3197 case PARM_DECL:
3198 if (DECL_RTL (exp) == 0)
3199 {
3200 error_with_decl (exp, "prior parameter's size depends on `%s'");
 3201	  return CONST0_RTX (mode);
3202 }
3203
3204 case FUNCTION_DECL:
3205 case VAR_DECL:
3206 case RESULT_DECL:
3207 if (DECL_RTL (exp) == 0)
3208 abort ();
3209 /* Ensure variable marked as used
3210 even if it doesn't go through a parser. */
3211 TREE_USED (exp) = 1;
3212 /* Handle variables inherited from containing functions. */
3213 context = decl_function_context (exp);
3214
3215 /* We treat inline_function_decl as an alias for the current function
3216 because that is the inline function whose vars, types, etc.
3217 are being merged into the current function.
3218 See expand_inline_function. */
3219 if (context != 0 && context != current_function_decl
3220 && context != inline_function_decl
3221 /* If var is static, we don't need a static chain to access it. */
3222 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3223 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3224 {
3225 rtx addr;
3226
3227 /* Mark as non-local and addressable. */
 3228	  DECL_NONLOCAL (exp) = 1;
3229 mark_addressable (exp);
3230 if (GET_CODE (DECL_RTL (exp)) != MEM)
3231 abort ();
3232 addr = XEXP (DECL_RTL (exp), 0);
3233 if (GET_CODE (addr) == MEM)
3234 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3235 else
3236 addr = fix_lexical_addr (addr, exp);
3237 return change_address (DECL_RTL (exp), mode, addr);
3238 }
 3239
3240 /* This is the case of an array whose size is to be determined
3241 from its initializer, while the initializer is still being parsed.
3242 See expand_decl. */
3243 if (GET_CODE (DECL_RTL (exp)) == MEM
3244 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3245 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3246 XEXP (DECL_RTL (exp), 0));
3247 if (GET_CODE (DECL_RTL (exp)) == MEM
3248 && modifier != EXPAND_CONST_ADDRESS
3249 && modifier != EXPAND_SUM
3250 && modifier != EXPAND_INITIALIZER)
3251 {
3252 /* DECL_RTL probably contains a constant address.
3253 On RISC machines where a constant address isn't valid,
3254 make some insns to get that address into a register. */
3255 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3256 || (flag_force_addr
3257 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3258 return change_address (DECL_RTL (exp), VOIDmode,
3259 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3260 }
3261 return DECL_RTL (exp);
3262
3263 case INTEGER_CST:
3264 return immed_double_const (TREE_INT_CST_LOW (exp),
3265 TREE_INT_CST_HIGH (exp),
3266 mode);
3267
3268 case CONST_DECL:
3269 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3270
3271 case REAL_CST:
3272 /* If optimized, generate immediate CONST_DOUBLE
3273 which will be turned into memory by reload if necessary.
3274
3275 We used to force a register so that loop.c could see it. But
3276 this does not allow gen_* patterns to perform optimizations with
3277 the constants. It also produces two insns in cases like "x = 1.0;".
3278 On most machines, floating-point constants are not permitted in
3279 many insns, so we'd end up copying it to a register in any case.
3280
3281 Now, we do the copying in expand_binop, if appropriate. */
3282 return immed_real_const (exp);
3283
3284 case COMPLEX_CST:
3285 case STRING_CST:
3286 if (! TREE_CST_RTL (exp))
3287 output_constant_def (exp);
3288
3289 /* TREE_CST_RTL probably contains a constant address.
3290 On RISC machines where a constant address isn't valid,
3291 make some insns to get that address into a register. */
3292 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3293 && modifier != EXPAND_CONST_ADDRESS
3294 && modifier != EXPAND_INITIALIZER
3295 && modifier != EXPAND_SUM
3296 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3297 return change_address (TREE_CST_RTL (exp), VOIDmode,
3298 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3299 return TREE_CST_RTL (exp);
3300
3301 case SAVE_EXPR:
3302 context = decl_function_context (exp);
3303 /* We treat inline_function_decl as an alias for the current function
3304 because that is the inline function whose vars, types, etc.
3305 are being merged into the current function.
3306 See expand_inline_function. */
3307 if (context == current_function_decl || context == inline_function_decl)
3308 context = 0;
3309
3310 /* If this is non-local, handle it. */
3311 if (context)
3312 {
3313 temp = SAVE_EXPR_RTL (exp);
3314 if (temp && GET_CODE (temp) == REG)
3315 {
3316 put_var_into_stack (exp);
3317 temp = SAVE_EXPR_RTL (exp);
3318 }
3319 if (temp == 0 || GET_CODE (temp) != MEM)
3320 abort ();
3321 return change_address (temp, mode,
3322 fix_lexical_addr (XEXP (temp, 0), exp));
3323 }
3324 if (SAVE_EXPR_RTL (exp) == 0)
3325 {
3326 if (mode == BLKmode)
3327 temp
3328 = assign_stack_temp (mode,
3329 int_size_in_bytes (TREE_TYPE (exp)), 0);
3330 else
3331 temp = gen_reg_rtx (mode);
3332 SAVE_EXPR_RTL (exp) = temp;
3333 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3334 if (!optimize && GET_CODE (temp) == REG)
3335 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3336 save_expr_regs);
3337 }
3338 return SAVE_EXPR_RTL (exp);
3339
3340 case EXIT_EXPR:
3341 /* Exit the current loop if the body-expression is true. */
3342 {
3343 rtx label = gen_label_rtx ();
906c4e36
RK
3344 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3345 expand_exit_loop (NULL_PTR);
bbf6f052
RK
3346 emit_label (label);
3347 }
3348 return const0_rtx;
3349
3350 case LOOP_EXPR:
3351 expand_start_loop (1);
3352 expand_expr_stmt (TREE_OPERAND (exp, 0));
3353 expand_end_loop ();
3354
3355 return const0_rtx;
3356
3357 case BIND_EXPR:
3358 {
3359 tree vars = TREE_OPERAND (exp, 0);
3360 int vars_need_expansion = 0;
3361
3362 /* Need to open a binding contour here because
 3363	   if there are any cleanups they must be contained here.  */
3364 expand_start_bindings (0);
3365
3366 /* Mark the corresponding BLOCK for output in its proper place. */
3367 if (TREE_OPERAND (exp, 2) != 0
3368 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3369 insert_block (TREE_OPERAND (exp, 2));
3370
3371 /* If VARS have not yet been expanded, expand them now. */
3372 while (vars)
3373 {
3374 if (DECL_RTL (vars) == 0)
3375 {
3376 vars_need_expansion = 1;
3377 expand_decl (vars);
3378 }
3379 expand_decl_init (vars);
3380 vars = TREE_CHAIN (vars);
3381 }
3382
3383 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3384
3385 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3386
3387 return temp;
3388 }
3389
3390 case RTL_EXPR:
3391 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3392 abort ();
3393 emit_insns (RTL_EXPR_SEQUENCE (exp));
3394 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3395 return RTL_EXPR_RTL (exp);
3396
3397 case CONSTRUCTOR:
3398 /* All elts simple constants => refer to a constant in memory. But
3399 if this is a non-BLKmode mode, let it store a field at a time
3400 since that should make a CONST_INT or CONST_DOUBLE when we
3401 fold. */
3402 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3403 {
3404 rtx constructor = output_constant_def (exp);
3405 if (modifier != EXPAND_CONST_ADDRESS
3406 && modifier != EXPAND_INITIALIZER
3407 && modifier != EXPAND_SUM
3408 && !memory_address_p (GET_MODE (constructor),
3409 XEXP (constructor, 0)))
3410 constructor = change_address (constructor, VOIDmode,
3411 XEXP (constructor, 0));
3412 return constructor;
3413 }
3414
3415 if (ignore)
3416 {
3417 tree elt;
3418 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3419 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3420 return const0_rtx;
3421 }
3422 else
3423 {
3424 if (target == 0 || ! safe_from_p (target, exp))
3425 {
3426 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3427 target = gen_reg_rtx (mode);
3428 else
3429 {
3430 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3431 if (target)
3432 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3433 target = safe_target;
3434 }
3435 }
3436 store_constructor (exp, target);
3437 return target;
3438 }
3439
3440 case INDIRECT_REF:
3441 {
3442 tree exp1 = TREE_OPERAND (exp, 0);
3443 tree exp2;
3444
3445 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3446 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3447 This code has the same general effect as simply doing
3448 expand_expr on the save expr, except that the expression PTR
3449 is computed for use as a memory address. This means different
3450 code, suitable for indexing, may be generated. */
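	/* E.g. for `*p += 1', the tree is roughly
	   *SAVE_EXPR<p> = *SAVE_EXPR<p> + 1; the address P is computed
	   once here and both uses share SAVE_EXPR_RTL.  */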
3451 if (TREE_CODE (exp1) == SAVE_EXPR
3452 && SAVE_EXPR_RTL (exp1) == 0
3453 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3454 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3455 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3456 {
3457 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3458 VOIDmode, EXPAND_SUM);
3459 op0 = memory_address (mode, temp);
3460 op0 = copy_all_regs (op0);
3461 SAVE_EXPR_RTL (exp1) = op0;
3462 }
3463 else
3464 {
 3465	  op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3466 op0 = memory_address (mode, op0);
3467 }
3468
3469 temp = gen_rtx (MEM, mode, op0);
3470 /* If address was computed by addition,
3471 mark this as an element of an aggregate. */
3472 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3473 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3474 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3475 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3476 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3477 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3478 || (TREE_CODE (exp1) == ADDR_EXPR
3479 && (exp2 = TREE_OPERAND (exp1, 0))
3480 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3481 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3482 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3483 MEM_IN_STRUCT_P (temp) = 1;
3484 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
 3485#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3486 a location is accessed through a pointer to const does not mean
3487 that the value there can never change. */
 3488	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
 3489#endif
3490 return temp;
3491 }
3492
3493 case ARRAY_REF:
3494 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3495 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3496 {
3497 /* Nonconstant array index or nonconstant element size.
3498 Generate the tree for *(&array+index) and expand that,
3499 except do it in a language-independent way
3500 and don't complain about non-lvalue arrays.
3501 `mark_addressable' should already have been called
3502 for any array for which this case will be reached. */
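	 /* Illustration (editor's sketch, not part of the original source):
	    for a non-constant index, an access such as

		int a[20], i;
		... a[i] ...

	    is rebuilt here as *(&a + i * sizeof (int)), the
	    language-independent pointer-arithmetic form, and then
	    expanded as an ordinary INDIRECT_REF.  */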
3503
3504 /* Don't forget the const or volatile flag from the array element. */
3505 tree variant_type = build_type_variant (type,
3506 TREE_READONLY (exp),
3507 TREE_THIS_VOLATILE (exp));
3508 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3509 TREE_OPERAND (exp, 0));
3510 tree index = TREE_OPERAND (exp, 1);
3511 tree elt;
3512
3513 /* Convert the integer argument to a type the same size as a pointer
3514 so the multiply won't overflow spuriously. */
3515 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3516 index = convert (type_for_size (POINTER_SIZE, 0), index);
3517
3518 /* Don't think the address has side effects
3519 just because the array does.
3520 (In some cases the address might have side effects,
3521 and we fail to record that fact here. However, it should not
3522 matter, since expand_expr should not care.) */
3523 TREE_SIDE_EFFECTS (array_adr) = 0;
3524
3525 elt = build1 (INDIRECT_REF, type,
3526 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3527 array_adr,
3528 fold (build (MULT_EXPR,
3529 TYPE_POINTER_TO (variant_type),
3530 index, size_in_bytes (type))))));
3531
3532 /* Volatility, etc., of new expression is same as old expression. */
3533 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3534 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3535 TREE_READONLY (elt) = TREE_READONLY (exp);
3536
3537 return expand_expr (elt, target, tmode, modifier);
3538 }
3539
3540 /* Fold an expression like: "foo"[2].
3541 This is not done in fold so it won't happen inside &. */
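	 /* Illustration (editor's sketch, not part of the original source):

		char c = "foo"[2];

	    folds here directly to the character constant 'o', so no
	    reference to the string constant itself need be emitted.  */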
3542 {
3543 int i;
3544 tree arg0 = TREE_OPERAND (exp, 0);
3545 tree arg1 = TREE_OPERAND (exp, 1);
3546
3547 if (TREE_CODE (arg0) == STRING_CST
3548 && TREE_CODE (arg1) == INTEGER_CST
3549 && !TREE_INT_CST_HIGH (arg1)
3550 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3551 {
3552 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3553 {
3554 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3555 TREE_TYPE (exp) = integer_type_node;
3556 return expand_expr (exp, target, tmode, modifier);
3557 }
3558 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3559 {
3560 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3561 TREE_TYPE (exp) = integer_type_node;
3562 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3563 }
3564 }
3565 }
3566
3567 /* If this is a constant index into a constant array,
3568 just get the value from the array. Handle both the cases when
3569 we have an explicit constructor and when our operand is a variable
3570 that was declared const. */
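	 /* Illustration (editor's sketch, not part of the original source):
	    given

		static const int t[3] = {10, 20, 30};

	    an access t[1] compiled with -O is reduced here to the
	    constant 20 by walking the CONSTRUCTOR chain of DECL_INITIAL,
	    so no load from memory is generated.  */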
3571
3572 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3573 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3574 {
3575 tree index = fold (TREE_OPERAND (exp, 1));
3576 if (TREE_CODE (index) == INTEGER_CST
3577 && TREE_INT_CST_HIGH (index) == 0)
3578 {
3579 int i = TREE_INT_CST_LOW (index);
3580 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3581
3582 while (elem && i--)
3583 elem = TREE_CHAIN (elem);
3584 if (elem)
3585 return expand_expr (fold (TREE_VALUE (elem)), target,
3586 tmode, modifier);
3587 }
3588 }
3589
3590 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3591 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3592 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3593 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3594 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3595 && optimize >= 1
3596 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3597 != ERROR_MARK))
3598 {
3599 tree index = fold (TREE_OPERAND (exp, 1));
3600 if (TREE_CODE (index) == INTEGER_CST
3601 && TREE_INT_CST_HIGH (index) == 0)
3602 {
3603 int i = TREE_INT_CST_LOW (index);
3604 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3605
3606 if (TREE_CODE (init) == CONSTRUCTOR)
3607 {
3608 tree elem = CONSTRUCTOR_ELTS (init);
3609
3610 while (elem && i--)
3611 elem = TREE_CHAIN (elem);
3612 if (elem)
3613 return expand_expr (fold (TREE_VALUE (elem)), target,
3614 tmode, modifier);
3615 }
3616 else if (TREE_CODE (init) == STRING_CST
3617 && i < TREE_STRING_LENGTH (init))
3618 {
3619 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3620 return convert_to_mode (mode, temp, 0);
3621 }
3622 }
3623 }
3624 /* Treat array-ref with constant index as a component-ref. */
3625
3626 case COMPONENT_REF:
3627 case BIT_FIELD_REF:
3628 /* If the operand is a CONSTRUCTOR, we can just extract the
3629 appropriate field if it is present. */
3630 if (code != ARRAY_REF
3631 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3632 {
3633 tree elt;
3634
3635 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3636 elt = TREE_CHAIN (elt))
3637 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3638 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3639 }
3640
3641 {
3642 enum machine_mode mode1;
3643 int bitsize;
3644 int bitpos;
3645 tree offset;
3646 int volatilep = 0;
3647 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3648 &mode1, &unsignedp, &volatilep);
3649
3650 /* In some cases, we will be offsetting OP0's address by a constant.
3651 So get it as a sum, if possible. If we will be using it
3652 directly in an insn, we validate it. */
3653 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3654
3655 /* If this is a constant, put it into a register if it is a
3656 legitimate constant and memory if it isn't. */
3657 if (CONSTANT_P (op0))
3658 {
3659 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3660 if (LEGITIMATE_CONSTANT_P (op0))
3661 op0 = force_reg (mode, op0);
3662 else
3663 op0 = validize_mem (force_const_mem (mode, op0));
3664 }
3665
3666 if (offset != 0)
3667 {
3668 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3669
3670 if (GET_CODE (op0) != MEM)
3671 abort ();
3672 op0 = change_address (op0, VOIDmode,
3673 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3674 force_reg (Pmode, offset_rtx)));
3675 }
3676
3677 /* Don't forget about volatility even if this is a bitfield. */
3678 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3679 {
3680 op0 = copy_rtx (op0);
3681 MEM_VOLATILE_P (op0) = 1;
3682 }
3683
3684 if (mode1 == VOIDmode
3685 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3686 && modifier != EXPAND_CONST_ADDRESS
3687 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3688 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3689 {
3690 /* In cases where an aligned union has an unaligned object
3691 as a field, we might be extracting a BLKmode value from
3692 an integer-mode (e.g., SImode) object. Handle this case
3693 by doing the extract into an object as wide as the field
3694 (which we know to be the width of a basic mode), then
3695 storing into memory, and changing the mode to BLKmode. */
3696 enum machine_mode ext_mode = mode;
3697
3698 if (ext_mode == BLKmode)
3699 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3700
3701 if (ext_mode == BLKmode)
3702 abort ();
3703
3704 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3705 unsignedp, target, ext_mode, ext_mode,
3706 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3707 int_size_in_bytes (TREE_TYPE (tem)));
3708 if (mode == BLKmode)
3709 {
3710 rtx new = assign_stack_temp (ext_mode,
3711 bitsize / BITS_PER_UNIT, 0);
3712
3713 emit_move_insn (new, op0);
3714 op0 = copy_rtx (new);
3715 PUT_MODE (op0, BLKmode);
3716 }
3717
3718 return op0;
3719 }
3720
3721 /* Get a reference to just this component. */
3722 if (modifier == EXPAND_CONST_ADDRESS
3723 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3724 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3725 (bitpos / BITS_PER_UNIT)));
3726 else
3727 op0 = change_address (op0, mode1,
3728 plus_constant (XEXP (op0, 0),
3729 (bitpos / BITS_PER_UNIT)));
3730 MEM_IN_STRUCT_P (op0) = 1;
3731 MEM_VOLATILE_P (op0) |= volatilep;
3732 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3733 return op0;
3734 if (target == 0)
3735 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3736 convert_move (target, op0, unsignedp);
3737 return target;
3738 }
3739
3740 case OFFSET_REF:
3741 {
3742 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3743 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3744 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3745 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3746 MEM_IN_STRUCT_P (temp) = 1;
3747 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3748#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3749 a location is accessed through a pointer to const does not mean
3750 that the value there can never change. */
3751 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3752#endif
3753 return temp;
3754 }
3755
3756 /* Intended for a reference to a buffer of a file-object in Pascal.
3757 But it's not certain that a special tree code will really be
3758 necessary for these. INDIRECT_REF might work for them. */
3759 case BUFFER_REF:
3760 abort ();
3761
3762 case WITH_CLEANUP_EXPR:
3763 if (RTL_EXPR_RTL (exp) == 0)
3764 {
3765 RTL_EXPR_RTL (exp)
3766 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3767 cleanups_this_call
3768 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
3769 /* That's it for this cleanup. */
3770 TREE_OPERAND (exp, 2) = 0;
3771 }
3772 return RTL_EXPR_RTL (exp);
3773
3774 case CALL_EXPR:
3775 /* Check for a built-in function. */
3776 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3777 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3778 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3779 return expand_builtin (exp, target, subtarget, tmode, ignore);
3780 /* If this call was expanded already by preexpand_calls,
3781 just return the result we got. */
3782 if (CALL_EXPR_RTL (exp) != 0)
3783 return CALL_EXPR_RTL (exp);
3784 return expand_call (exp, target, ignore);
3785
3786 case NON_LVALUE_EXPR:
3787 case NOP_EXPR:
3788 case CONVERT_EXPR:
3789 case REFERENCE_EXPR:
3790 if (TREE_CODE (type) == VOID_TYPE || ignore)
3791 {
3792 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3793 return const0_rtx;
3794 }
3795 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3796 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3797 if (TREE_CODE (type) == UNION_TYPE)
3798 {
3799 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3800 if (target == 0)
3801 {
3802 if (mode == BLKmode)
3803 {
3804 if (TYPE_SIZE (type) == 0
3805 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3806 abort ();
3807 target = assign_stack_temp (BLKmode,
3808 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3809 + BITS_PER_UNIT - 1)
3810 / BITS_PER_UNIT, 0);
3811 }
3812 else
3813 target = gen_reg_rtx (mode);
3814 }
3815 if (GET_CODE (target) == MEM)
3816 /* Store data into beginning of memory target. */
3817 store_expr (TREE_OPERAND (exp, 0),
3818 change_address (target, TYPE_MODE (valtype), 0),
3819 NULL_RTX);
3820 else if (GET_CODE (target) == REG)
3821 /* Store this field into a union of the proper type. */
3822 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3823 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3824 VOIDmode, 0, 1,
3825 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3826 else
3827 abort ();
3828
3829 /* Return the entire union. */
3830 return target;
3831 }
3832 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, modifier);
3833 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3834 return op0;
3835 if (modifier == EXPAND_INITIALIZER)
3836 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
3837 if (flag_force_mem && GET_CODE (op0) == MEM)
3838 op0 = copy_to_reg (op0);
3839
3840 if (target == 0)
3841 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3842 else
3843 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3844 return target;
3845
3846 case PLUS_EXPR:
3847 /* We come here from MINUS_EXPR when the second operand is a constant. */
3848 plus_expr:
3849 this_optab = add_optab;
3850
3851 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3852 something else, make sure we add the register to the constant and
3853 then to the other thing. This case can occur during strength
3854 reduction and doing it this way will produce better code if the
3855 frame pointer or argument pointer is eliminated.
3856
3857 fold-const.c will ensure that the constant is always in the inner
3858 PLUS_EXPR, so the only case we need to do anything about is if
3859 sp, ap, or fp is our second argument, in which case we must swap
3860 the innermost first argument and our second argument. */
3861
3862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3863 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3864 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3865 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3866 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3867 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3868 {
3869 tree t = TREE_OPERAND (exp, 1);
3870
3871 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3872 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3873 }
3874
3875 /* If the result is to be Pmode and we are adding an integer to
3876 something, we might be forming a constant. So try to use
3877 plus_constant. If it produces a sum and we can't accept it,
3878 use force_operand. This allows P = &ARR[const] to generate
3879 efficient code on machines where a SYMBOL_REF is not a valid
3880 address.
3881
3882 If this is an EXPAND_SUM call, always return the sum. */
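	 /* Illustration (editor's sketch, not part of the original source):
	    for

		static int arr[100];
		int *p = &arr[10];

	    the sum is formed with plus_constant as
	    (const (plus (symbol_ref "arr") (const_int 40))), assuming
	    4-byte ints; if that is not a valid address on the target,
	    force_operand loads it into a register instead.  */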
3883 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3884 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3885 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3886 || mode == Pmode))
3887 {
3888 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3889 EXPAND_SUM);
3890 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3891 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3892 op1 = force_operand (op1, target);
3893 return op1;
3894 }
3895
3896 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3897 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3898 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3899 || mode == Pmode))
3900 {
3901 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3902 EXPAND_SUM);
3903 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3904 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3905 op0 = force_operand (op0, target);
3906 return op0;
3907 }
3908
3909 /* No sense saving up arithmetic to be done
3910 if it's all in the wrong mode to form part of an address.
3911 And force_operand won't know whether to sign-extend or
3912 zero-extend. */
3913 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3914 || mode != Pmode) goto binop;
3915
3916 preexpand_calls (exp);
3917 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3918 subtarget = 0;
3919
3920 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
3922
3923 /* Make sure any term that's a sum with a constant comes last. */
3924 if (GET_CODE (op0) == PLUS
3925 && CONSTANT_P (XEXP (op0, 1)))
3926 {
3927 temp = op0;
3928 op0 = op1;
3929 op1 = temp;
3930 }
3931 /* If adding to a sum including a constant,
3932 associate it to put the constant outside. */
3933 if (GET_CODE (op1) == PLUS
3934 && CONSTANT_P (XEXP (op1, 1)))
3935 {
3936 rtx constant_term = const0_rtx;
3937
3938 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3939 if (temp != 0)
3940 op0 = temp;
3941 /* Ensure that MULT comes first if there is one. */
3942 else if (GET_CODE (op0) == MULT)
3943 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
3944 else
3945 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3946
3947 /* Let's also eliminate constants from op0 if possible. */
3948 op0 = eliminate_constant_term (op0, &constant_term);
3949
3950 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3951 their sum should be a constant. Form it into OP1, since the
3952 result we want will then be OP0 + OP1. */
3953
3954 temp = simplify_binary_operation (PLUS, mode, constant_term,
3955 XEXP (op1, 1));
3956 if (temp != 0)
3957 op1 = temp;
3958 else
3959 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3960 }
3961
3962 /* Put a constant term last and put a multiplication first. */
3963 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3964 temp = op1, op1 = op0, op0 = temp;
3965
3966 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3967 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
3968
3969 case MINUS_EXPR:
3970 /* Handle difference of two symbolic constants,
3971 for the sake of an initializer. */
3972 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3973 && really_constant_p (TREE_OPERAND (exp, 0))
3974 && really_constant_p (TREE_OPERAND (exp, 1)))
3975 {
3976 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
3977 VOIDmode, modifier);
3978 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
3979 VOIDmode, modifier);
3980 return gen_rtx (MINUS, mode, op0, op1);
3981 }
3982 /* Convert A - const to A + (-const). */
3983 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3984 {
3985 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3986 fold (build1 (NEGATE_EXPR, type,
3987 TREE_OPERAND (exp, 1))));
3988 goto plus_expr;
3989 }
3990 this_optab = sub_optab;
3991 goto binop;
3992
3993 case MULT_EXPR:
3994 preexpand_calls (exp);
3995 /* If first operand is constant, swap them.
3996 Thus the following special case checks need only
3997 check the second operand. */
3998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3999 {
4000 register tree t1 = TREE_OPERAND (exp, 0);
4001 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4002 TREE_OPERAND (exp, 1) = t1;
4003 }
4004
4005 /* Attempt to return something suitable for generating an
4006 indexed address, for machines that support that. */
4007
4008 if (modifier == EXPAND_SUM && mode == Pmode
4009 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4010 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4011 {
4012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4013
4014 /* Apply distributive law if OP0 is x+c. */
4015 if (GET_CODE (op0) == PLUS
4016 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4017 return gen_rtx (PLUS, mode,
4018 gen_rtx (MULT, mode, XEXP (op0, 0),
4019 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4020 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4021 * INTVAL (XEXP (op0, 1))));
4022
4023 if (GET_CODE (op0) != REG)
4024 op0 = force_operand (op0, NULL_RTX);
4025 if (GET_CODE (op0) != REG)
4026 op0 = copy_to_mode_reg (mode, op0);
4027
4028 return gen_rtx (MULT, mode, op0,
4029 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4030 }
4031
4032 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4033 subtarget = 0;
4034
4035 /* Check for multiplying things that have been extended
4036 from a narrower type. If this machine supports multiplying
4037 in that narrower type with a result in the desired type,
4038 do it that way, and avoid the explicit type-conversion. */
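	 /* Illustration (editor's sketch, not part of the original source):
	    on a target with a 16x16->32 widening multiply,

		short a, b;
		long c = (long) a * (long) b;

	    matches the test below (both operands are NOP_EXPRs from the
	    same narrower type) and is compiled as a single widening
	    multiply of A and B rather than two extensions followed by a
	    full-width multiply.  */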
4039 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4040 && TREE_CODE (type) == INTEGER_TYPE
4041 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4042 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4043 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4044 && int_fits_type_p (TREE_OPERAND (exp, 1),
4045 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4046 /* Don't use a widening multiply if a shift will do. */
4047 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4048 > HOST_BITS_PER_WIDE_INT)
4049 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4050 ||
4051 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4052 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4053 ==
4054 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4055 /* If both operands are extended, they must either both
4056 be zero-extended or both be sign-extended. */
4057 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4058 ==
4059 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4060 {
4061 enum machine_mode innermode
4062 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4063 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4064 ? umul_widen_optab : smul_widen_optab);
4065 if (mode == GET_MODE_WIDER_MODE (innermode)
4066 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4067 {
4068 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4069 NULL_RTX, VOIDmode, 0);
4070 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4071 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4072 VOIDmode, 0);
4073 else
4074 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4075 NULL_RTX, VOIDmode, 0);
4076 goto binop2;
4077 }
4078 }
4079 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4080 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4081 return expand_mult (mode, op0, op1, target, unsignedp);
4082
4083 case TRUNC_DIV_EXPR:
4084 case FLOOR_DIV_EXPR:
4085 case CEIL_DIV_EXPR:
4086 case ROUND_DIV_EXPR:
4087 case EXACT_DIV_EXPR:
4088 preexpand_calls (exp);
4089 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4090 subtarget = 0;
4091 /* Possible optimization: compute the dividend with EXPAND_SUM
4092 then if the divisor is constant can optimize the case
4093 where some terms of the dividend have coeffs divisible by it. */
4094 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4095 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4096 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4097
4098 case RDIV_EXPR:
4099 this_optab = flodiv_optab;
4100 goto binop;
4101
4102 case TRUNC_MOD_EXPR:
4103 case FLOOR_MOD_EXPR:
4104 case CEIL_MOD_EXPR:
4105 case ROUND_MOD_EXPR:
4106 preexpand_calls (exp);
4107 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4108 subtarget = 0;
4109 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4110 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4111 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4112
4113 case FIX_ROUND_EXPR:
4114 case FIX_FLOOR_EXPR:
4115 case FIX_CEIL_EXPR:
4116 abort (); /* Not used for C. */
4117
4118 case FIX_TRUNC_EXPR:
4119 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4120 if (target == 0)
4121 target = gen_reg_rtx (mode);
4122 expand_fix (target, op0, unsignedp);
4123 return target;
4124
4125 case FLOAT_EXPR:
4126 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4127 if (target == 0)
4128 target = gen_reg_rtx (mode);
4129 /* expand_float can't figure out what to do if FROM has VOIDmode.
4130 So give it the correct mode. With -O, cse will optimize this. */
4131 if (GET_MODE (op0) == VOIDmode)
4132 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4133 op0);
4134 expand_float (target, op0,
4135 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4136 return target;
4137
4138 case NEGATE_EXPR:
4139 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4140 temp = expand_unop (mode, neg_optab, op0, target, 0);
4141 if (temp == 0)
4142 abort ();
4143 return temp;
4144
4145 case ABS_EXPR:
4146 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4147
4148 /* Unsigned abs is simply the operand. Testing here means we don't
4149 risk generating incorrect code below. */
4150 if (TREE_UNSIGNED (type))
4151 return op0;
4152
4153 /* First try to do it with a special abs instruction. */
4154 temp = expand_unop (mode, abs_optab, op0, target, 0);
4155 if (temp != 0)
4156 return temp;
4157
4158 /* If this machine has expensive jumps, we can do integer absolute
4159 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4160 where W is the width of MODE. */
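	 /* Worked example (editor's note, not part of the original source):
	    for 32-bit X = -5, X >> 31 is -1 (arithmetic shift), so
	    ((X >> 31) ^ X) - (X >> 31) = (~(-5)) - (-1) = 4 + 1 = 5;
	    for X >= 0 the shift yields 0 and the expression reduces to
	    (0 ^ X) - 0 = X, so no branch is needed in either case.  */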
4161
4162 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4163 {
4164 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4165 size_int (GET_MODE_BITSIZE (mode) - 1),
4166 NULL_RTX, 0);
4167
4168 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4169 OPTAB_LIB_WIDEN);
4170 if (temp != 0)
4171 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4172 OPTAB_LIB_WIDEN);
4173
4174 if (temp != 0)
4175 return temp;
4176 }
4177
4178 /* If that does not win, use conditional jump and negate. */
4179 target = original_target;
4180 temp = gen_label_rtx ();
4181 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4182 || (GET_CODE (target) == REG
4183 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4184 target = gen_reg_rtx (mode);
4185 emit_move_insn (target, op0);
4186 emit_cmp_insn (target,
4187 expand_expr (convert (type, integer_zero_node),
4188 NULL_RTX, VOIDmode, 0),
4189 GE, NULL_RTX, mode, 0, 0);
4190 NO_DEFER_POP;
4191 emit_jump_insn (gen_bge (temp));
4192 op0 = expand_unop (mode, neg_optab, target, target, 0);
4193 if (op0 != target)
4194 emit_move_insn (target, op0);
4195 emit_label (temp);
4196 OK_DEFER_POP;
4197 return target;
4198
4199 case MAX_EXPR:
4200 case MIN_EXPR:
4201 target = original_target;
4202 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4203 || (GET_CODE (target) == REG
4204 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4205 target = gen_reg_rtx (mode);
4206 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4207 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4208
4209 /* First try to do it with a special MIN or MAX instruction.
4210 If that does not win, use a conditional jump to select the proper
4211 value. */
4212 this_optab = (TREE_UNSIGNED (type)
4213 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4214 : (code == MIN_EXPR ? smin_optab : smax_optab));
4215
4216 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4217 OPTAB_WIDEN);
4218 if (temp != 0)
4219 return temp;
4220
4221 if (target != op0)
4222 emit_move_insn (target, op0);
4223 op0 = gen_label_rtx ();
4224 if (code == MAX_EXPR)
4225 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4226 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4227 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4228 else
4229 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4230 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4231 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4232 if (temp == const0_rtx)
4233 emit_move_insn (target, op1);
4234 else if (temp != const_true_rtx)
4235 {
4236 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4237 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4238 else
4239 abort ();
4240 emit_move_insn (target, op1);
4241 }
4242 emit_label (op0);
4243 return target;
4244
4245/* ??? Can optimize when the operand of this is a bitwise operation,
4246 by using a different bitwise operation. */
4247 case BIT_NOT_EXPR:
4248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4249 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4250 if (temp == 0)
4251 abort ();
4252 return temp;
4253
4254 case FFS_EXPR:
4255 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4256 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4257 if (temp == 0)
4258 abort ();
4259 return temp;
4260
4261/* ??? Can optimize bitwise operations with one arg constant.
4262 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4263 and (a bitwise1 b) bitwise2 b (etc)
4264 but that is probably not worthwhile. */
4265
4266/* BIT_AND_EXPR is for bitwise anding.
4267 TRUTH_AND_EXPR is for anding two boolean values
4268 when we want in all cases to compute both of them.
4269 In general it is fastest to do TRUTH_AND_EXPR by
4270 computing both operands as actual zero-or-1 values
4271 and then bitwise anding. In cases where there cannot
4272 be any side effects, better code would be made by
4273 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4274 but the question is how to recognize those cases. */
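	 /* Illustration (editor's sketch, not part of the original source):
	    with zero-or-one operands, TRUTH_AND_EXPR computes

		t = a & b;	(both operands always evaluated)

	    whereas TRUTH_ANDIF_EXPR computes a && b with a conditional
	    jump that skips B entirely whenever A is zero.  */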
4275
4276 case TRUTH_AND_EXPR:
4277 case BIT_AND_EXPR:
4278 this_optab = and_optab;
4279 goto binop;
4280
4281/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4282 case TRUTH_OR_EXPR:
4283 case BIT_IOR_EXPR:
4284 this_optab = ior_optab;
4285 goto binop;
4286
4287 case BIT_XOR_EXPR:
4288 this_optab = xor_optab;
4289 goto binop;
4290
4291 case LSHIFT_EXPR:
4292 case RSHIFT_EXPR:
4293 case LROTATE_EXPR:
4294 case RROTATE_EXPR:
4295 preexpand_calls (exp);
4296 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4297 subtarget = 0;
4298 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4299 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4300 unsignedp);
4301
4302/* Could determine the answer when only additive constants differ.
4303 Also, the addition of one can be handled by changing the condition. */
4304 case LT_EXPR:
4305 case LE_EXPR:
4306 case GT_EXPR:
4307 case GE_EXPR:
4308 case EQ_EXPR:
4309 case NE_EXPR:
4310 preexpand_calls (exp);
4311 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4312 if (temp != 0)
4313 return temp;
4314 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4315 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4316 && original_target
4317 && GET_CODE (original_target) == REG
4318 && (GET_MODE (original_target)
4319 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4320 {
4321 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4322 if (temp != original_target)
4323 temp = copy_to_reg (temp);
4324 op1 = gen_label_rtx ();
4325 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4326 GET_MODE (temp), unsignedp, 0);
4327 emit_jump_insn (gen_beq (op1));
4328 emit_move_insn (temp, const1_rtx);
4329 emit_label (op1);
4330 return temp;
4331 }
4332 /* If no set-flag instruction, must generate a conditional
4333 store into a temporary variable. Drop through
4334 and handle this like && and ||. */
4335
4336 case TRUTH_ANDIF_EXPR:
4337 case TRUTH_ORIF_EXPR:
4338 if (target == 0 || ! safe_from_p (target, exp)
4339 /* Make sure we don't have a hard reg (such as function's return
4340 value) live across basic blocks, if not optimizing. */
4341 || (!optimize && GET_CODE (target) == REG
4342 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4343 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4344 emit_clr_insn (target);
4345 op1 = gen_label_rtx ();
4346 jumpifnot (exp, op1);
4347 emit_0_to_1_insn (target);
4348 emit_label (op1);
4349 return target;
4350
4351 case TRUTH_NOT_EXPR:
4352 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4353 /* The parser is careful to generate TRUTH_NOT_EXPR
4354 only with operands that are always zero or one. */
4355 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4356 target, 1, OPTAB_LIB_WIDEN);
4357 if (temp == 0)
4358 abort ();
4359 return temp;
4360
4361 case COMPOUND_EXPR:
4362 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4363 emit_queue ();
4364 return expand_expr (TREE_OPERAND (exp, 1),
4365 (ignore ? const0_rtx : target),
4366 VOIDmode, 0);
4367
4368 case COND_EXPR:
4369 {
4370 /* Note that COND_EXPRs whose type is a structure or union
4371 are required to be constructed to contain assignments of
4372 a temporary variable, so that we can evaluate them here
4373 for side effect only. If type is void, we must do likewise. */
4374
4375 /* If an arm of the branch requires a cleanup,
4376 only that cleanup is performed. */
4377
4378 tree singleton = 0;
4379 tree binary_op = 0, unary_op = 0;
4380 tree old_cleanups = cleanups_this_call;
4381 cleanups_this_call = 0;
4382
4383 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4384 convert it to our mode, if necessary. */
4385 if (integer_onep (TREE_OPERAND (exp, 1))
4386 && integer_zerop (TREE_OPERAND (exp, 2))
4387 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4388 {
4389 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4390 if (GET_MODE (op0) == mode)
4391 return op0;
4392 if (target == 0)
4393 target = gen_reg_rtx (mode);
4394 convert_move (target, op0, unsignedp);
4395 return target;
4396 }
4397
4398 /* If we are not to produce a result, we have no target. Otherwise,
4399 if a target was specified use it; it will not be used as an
4400 intermediate target unless it is safe. If no target, use a
4401 temporary. */
4402
4403 if (mode == VOIDmode || ignore)
4404 temp = 0;
4405 else if (original_target
4406 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4407 temp = original_target;
4408 else if (mode == BLKmode)
4409 {
4410 if (TYPE_SIZE (type) == 0
4411 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4412 abort ();
4413 temp = assign_stack_temp (BLKmode,
4414 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4415 + BITS_PER_UNIT - 1)
4416 / BITS_PER_UNIT, 0);
4417 }
4418 else
4419 temp = gen_reg_rtx (mode);
4420
4421 /* Check for X ? A + B : A. If we have this, we can copy
4422 A to the output and conditionally add B. Similarly for unary
4423 operations. Don't do this if X has side-effects because
4424 those side effects might affect A or B and the "?" operation is
4425 a sequence point in ANSI. (We test for side effects later.) */
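	 /* Illustration (editor's sketch, not part of the original source):
	    for

		r = x ? a + 1 : a;

	    the singleton is A and the binary op is A + 1; when X can be
	    computed by a store-flag instruction, the whole conditional
	    becomes r = a + (x != 0), with no branches at all.  */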
4426
4427 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4428 && operand_equal_p (TREE_OPERAND (exp, 2),
4429 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4430 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4431 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4432 && operand_equal_p (TREE_OPERAND (exp, 1),
4433 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4434 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4435 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4436 && operand_equal_p (TREE_OPERAND (exp, 2),
4437 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4438 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4439 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4440 && operand_equal_p (TREE_OPERAND (exp, 1),
4441 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4442 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4443
4444 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4445 operation, do this as A + (X != 0). Similarly for other simple
4446 binary operators. */
4447 if (singleton && binary_op
4448 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4449 && (TREE_CODE (binary_op) == PLUS_EXPR
4450 || TREE_CODE (binary_op) == MINUS_EXPR
4451 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4452 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4453 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4454 && integer_onep (TREE_OPERAND (binary_op, 1))
4455 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4456 {
4457 rtx result;
4458 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4459 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4460 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4461 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4462 : and_optab);
4463
4464 /* If we had X ? A : A + 1, do this as A + (X == 0).
4465
4466 We have to invert the truth value here and then put it
4467 back later if do_store_flag fails. We cannot simply copy
4468 TREE_OPERAND (exp, 0) to another variable and modify that
4469 because invert_truthvalue can modify the tree pointed to
4470 by its argument. */
4471 if (singleton == TREE_OPERAND (exp, 1))
4472 TREE_OPERAND (exp, 0)
4473 = invert_truthvalue (TREE_OPERAND (exp, 0));
4474
4475 result = do_store_flag (TREE_OPERAND (exp, 0),
4476 (safe_from_p (temp, singleton)
4477 ? temp : NULL_RTX),
4478 mode, BRANCH_COST <= 1);
4479
4480 if (result)
4481 {
4482 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4483 return expand_binop (mode, boptab, op1, result, temp,
4484 unsignedp, OPTAB_LIB_WIDEN);
4485 }
4486 else if (singleton == TREE_OPERAND (exp, 1))
4487 TREE_OPERAND (exp, 0)
4488 = invert_truthvalue (TREE_OPERAND (exp, 0));
4489 }
4490
4491 NO_DEFER_POP;
4492 op0 = gen_label_rtx ();
4493
4494 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4495 {
4496 if (temp != 0)
4497 {
4498 /* If the target conflicts with the other operand of the
4499 binary op, we can't use it. Also, we can't use the target
4500 if it is a hard register, because evaluating the condition
4501 might clobber it. */
4502 if ((binary_op
4503 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4504 || (GET_CODE (temp) == REG
4505 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4506 temp = gen_reg_rtx (mode);
4507 store_expr (singleton, temp, 0);
4508 }
4509 else
4510 expand_expr (singleton,
4511 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4512 if (cleanups_this_call)
4513 {
4514 sorry ("aggregate value in COND_EXPR");
4515 cleanups_this_call = 0;
4516 }
4517 if (singleton == TREE_OPERAND (exp, 1))
4518 jumpif (TREE_OPERAND (exp, 0), op0);
4519 else
4520 jumpifnot (TREE_OPERAND (exp, 0), op0);
4521
4522 if (binary_op && temp == 0)
4523 /* Just touch the other operand. */
4524 expand_expr (TREE_OPERAND (binary_op, 1),
4525 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4526 else if (binary_op)
4527 store_expr (build (TREE_CODE (binary_op), type,
4528 make_tree (type, temp),
4529 TREE_OPERAND (binary_op, 1)),
4530 temp, 0);
4531 else
4532 store_expr (build1 (TREE_CODE (unary_op), type,
4533 make_tree (type, temp)),
4534 temp, 0);
4535 op1 = op0;
4536 }
4537#if 0
4538 /* This is now done in jump.c and is better done there because it
4539 produces shorter register lifetimes. */
4540
4541 /* Check for both possibilities either constants or variables
4542 in registers (but not the same as the target!). If so, can
4543 save branches by assigning one, branching, and assigning the
4544 other. */
4545 else if (temp && GET_MODE (temp) != BLKmode
4546 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4547 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4548 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4549 && DECL_RTL (TREE_OPERAND (exp, 1))
4550 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4551 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4552 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4553 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4554 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4555 && DECL_RTL (TREE_OPERAND (exp, 2))
4556 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4557 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4558 {
4559 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4560 temp = gen_reg_rtx (mode);
4561 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4562 jumpifnot (TREE_OPERAND (exp, 0), op0);
4563 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4564 op1 = op0;
4565 }
4566#endif
4567 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4568 comparison operator. If we have one of these cases, set the
4569 output to A, branch on A (cse will merge these two references),
4570 then set the output to FOO. */
4571 else if (temp
4572 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4573 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4574 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4575 TREE_OPERAND (exp, 1), 0)
4576 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4577 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4578 {
4579 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4580 temp = gen_reg_rtx (mode);
4581 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4582 jumpif (TREE_OPERAND (exp, 0), op0);
4583 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4584 op1 = op0;
4585 }
4586 else if (temp
4587 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4588 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4589 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4590 TREE_OPERAND (exp, 2), 0)
4591 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4592 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4593 {
4594 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4595 temp = gen_reg_rtx (mode);
4596 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4597 jumpifnot (TREE_OPERAND (exp, 0), op0);
4598 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4599 op1 = op0;
4600 }
4601 else
4602 {
4603 op1 = gen_label_rtx ();
4604 jumpifnot (TREE_OPERAND (exp, 0), op0);
4605 if (temp != 0)
4606 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4607 else
4608 expand_expr (TREE_OPERAND (exp, 1),
4609 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4610 if (cleanups_this_call)
4611 {
4612 sorry ("aggregate value in COND_EXPR");
4613 cleanups_this_call = 0;
4614 }
4615
4616 emit_queue ();
4617 emit_jump_insn (gen_jump (op1));
4618 emit_barrier ();
4619 emit_label (op0);
4620 if (temp != 0)
4621 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4622 else
4623 expand_expr (TREE_OPERAND (exp, 2),
4624 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4625 }
4626
4627 if (cleanups_this_call)
4628 {
4629 sorry ("aggregate value in COND_EXPR");
4630 cleanups_this_call = 0;
4631 }
4632
4633 emit_queue ();
4634 emit_label (op1);
4635 OK_DEFER_POP;
4636 cleanups_this_call = old_cleanups;
4637 return temp;
4638 }
4639
4640 case TARGET_EXPR:
4641 {
4642 /* Something needs to be initialized, but we didn't know
4643 where that thing was when building the tree. For example,
4644 it could be the return value of a function, or a parameter
4645 to a function which lays down in the stack, or a temporary
4646 variable which must be passed by reference.
4647
4648 We guarantee that the expression will either be constructed
4649 or copied into our original target. */
4650
4651 tree slot = TREE_OPERAND (exp, 0);
4652 tree exp1;
4653
4654 if (TREE_CODE (slot) != VAR_DECL)
4655 abort ();
4656
4657 if (target == 0)
4658 {
4659 if (DECL_RTL (slot) != 0)
4660 {
4661 target = DECL_RTL (slot);
4662 /* If we have already expanded the slot, don't do
4663 it again. (mrs) */
4664 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4665 return target;
4666 }
4667 else
4668 {
4669 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4670 /* All temp slots at this level must not conflict. */
4671 preserve_temp_slots (target);
4672 DECL_RTL (slot) = target;
4673 }
4674
4675#if 0
4676 /* I bet this needs to be done, and I bet that it needs to
4677 be above, inside the else clause. The reason is
4678 simple: how else is it going to get cleaned up? (mrs)
4679
4680 The reason it probably did not work before, and was
4681 commented out, is that this was re-expanding already
4682 expanded target_exprs (target == 0 and DECL_RTL (slot)
4683 != 0) and also cleaning them up many times as well. :-( */
4684
4685 /* Since SLOT is not known to the called function
4686 to belong to its stack frame, we must build an explicit
4687 cleanup. This case occurs when we must build up a reference
4688 to pass the reference as an argument. In this case,
4689 it is very likely that such a reference need not be
4690 built here. */
4691
4692 if (TREE_OPERAND (exp, 2) == 0)
4693 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4694 if (TREE_OPERAND (exp, 2))
4695 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4696 cleanups_this_call);
4697#endif
4698 }
4699 else
4700 {
4701 /* This case does occur, when expanding a parameter which
4702 needs to be constructed on the stack. The target
4703 is the actual stack address that we want to initialize.
4704 The function we call will perform the cleanup in this case. */
4705
4706 DECL_RTL (slot) = target;
4707 }
4708
4709 exp1 = TREE_OPERAND (exp, 1);
4710 /* Mark it as expanded. */
4711 TREE_OPERAND (exp, 1) = NULL_TREE;
4712
4713 return expand_expr (exp1, target, tmode, modifier);
4714 }
4715
4716 case INIT_EXPR:
4717 {
4718 tree lhs = TREE_OPERAND (exp, 0);
4719 tree rhs = TREE_OPERAND (exp, 1);
4720 tree noncopied_parts = 0;
4721 tree lhs_type = TREE_TYPE (lhs);
4722
4723 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4724 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4725 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4726 TYPE_NONCOPIED_PARTS (lhs_type));
4727 while (noncopied_parts != 0)
4728 {
4729 expand_assignment (TREE_VALUE (noncopied_parts),
4730 TREE_PURPOSE (noncopied_parts), 0, 0);
4731 noncopied_parts = TREE_CHAIN (noncopied_parts);
4732 }
4733 return temp;
4734 }
4735
4736 case MODIFY_EXPR:
4737 {
4738 /* If lhs is complex, expand calls in rhs before computing it.
4739 That's so we don't compute a pointer and save it over a call.
4740 If lhs is simple, compute it first so we can give it as a
4741 target if the rhs is just a call. This avoids an extra temp and copy
4742 and that prevents a partial-subsumption which makes bad code.
4743 Actually we could treat component_ref's of vars like vars. */
4744
4745 tree lhs = TREE_OPERAND (exp, 0);
4746 tree rhs = TREE_OPERAND (exp, 1);
4747 tree noncopied_parts = 0;
4748 tree lhs_type = TREE_TYPE (lhs);
4749
4750 temp = 0;
4751
4752 if (TREE_CODE (lhs) != VAR_DECL
4753 && TREE_CODE (lhs) != RESULT_DECL
4754 && TREE_CODE (lhs) != PARM_DECL)
4755 preexpand_calls (exp);
4756
4757 /* Check for |= or &= of a bitfield of size one into another bitfield
4758 of size 1. In this case, (unless we need the result of the
4759 assignment) we can do this more efficiently with a
4760 test followed by an assignment, if necessary.
4761
4762 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4763 things change so we do, this code should be enhanced to
4764 support it. */
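	 /* Illustration (editor's sketch, not part of the original source):

		struct { unsigned a : 1, b : 1; } s;
		s.a |= s.b;

	    with the result unused, is compiled below as a jump on s.b
	    around a store of 1 into s.a, avoiding a read-modify-write
	    of the destination bitfield.  */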
4765 if (ignore
4766 && TREE_CODE (lhs) == COMPONENT_REF
4767 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4768 || TREE_CODE (rhs) == BIT_AND_EXPR)
4769 && TREE_OPERAND (rhs, 0) == lhs
4770 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4771 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4772 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4773 {
4774 rtx label = gen_label_rtx ();
4775
4776 do_jump (TREE_OPERAND (rhs, 1),
4777 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4778 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4779 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4780 (TREE_CODE (rhs) == BIT_IOR_EXPR
4781 ? integer_one_node
4782 : integer_zero_node)),
4783 0, 0);
4784 do_pending_stack_adjust ();
4785 emit_label (label);
4786 return const0_rtx;
4787 }
4788
4789 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4790 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4791 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4792 TYPE_NONCOPIED_PARTS (lhs_type));
4793
4794 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4795 while (noncopied_parts != 0)
4796 {
4797 expand_assignment (TREE_PURPOSE (noncopied_parts),
4798 TREE_VALUE (noncopied_parts), 0, 0);
4799 noncopied_parts = TREE_CHAIN (noncopied_parts);
4800 }
4801 return temp;
4802 }
4803
4804 case PREINCREMENT_EXPR:
4805 case PREDECREMENT_EXPR:
4806 return expand_increment (exp, 0);
4807
4808 case POSTINCREMENT_EXPR:
4809 case POSTDECREMENT_EXPR:
4810 /* Faster to treat as pre-increment if result is not used. */
4811 return expand_increment (exp, ! ignore);
4812
4813 case ADDR_EXPR:
4814 /* Are we taking the address of a nested function? */
4815 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4816 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4817 {
4818 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4819 op0 = force_operand (op0, target);
4820 }
4821 else
4822 {
4823 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
4824 (modifier == EXPAND_INITIALIZER
4825 ? modifier : EXPAND_CONST_ADDRESS));
4826 if (GET_CODE (op0) != MEM)
4827 abort ();
4828
4829 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4830 return XEXP (op0, 0);
4831 op0 = force_operand (XEXP (op0, 0), target);
4832 }
4833 if (flag_force_addr && GET_CODE (op0) != REG)
4834 return force_reg (Pmode, op0);
4835 return op0;
4836
4837 case ENTRY_VALUE_EXPR:
4838 abort ();
4839
4840 case ERROR_MARK:
4841 return const0_rtx;
4842
4843 default:
4844 return (*lang_expand_expr) (exp, target, tmode, modifier);
4845 }
4846
4847 /* Here to do an ordinary binary operator, generating an instruction
4848 from the optab already placed in `this_optab'. */
4849 binop:
4850 preexpand_calls (exp);
4851 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4852 subtarget = 0;
4853 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4854 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4855 binop2:
4856 temp = expand_binop (mode, this_optab, op0, op1, target,
4857 unsignedp, OPTAB_LIB_WIDEN);
4858 if (temp == 0)
4859 abort ();
4860 return temp;
4861}
4862\f
4863/* Return the alignment in bits of EXP, a pointer valued expression.
4864 But don't return more than MAX_ALIGN no matter what.
4865 The alignment returned is, by default, the alignment of the thing that
4866 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4867
4868 Otherwise, look at the expression to see if we can do better, i.e., if the
4869 expression is actually pointing at an object whose alignment is tighter. */
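	 /* Illustration (editor's sketch, not part of the original source):
	    for (char *) &d + 2 with D a double aligned to 64 bits, the
	    ADDR_EXPR case reports DECL_ALIGN (d), but the PLUS_EXPR case
	    first shrinks MAX_ALIGN to 16 bits, since adding an offset of
	    2 bytes preserves only 2-byte alignment.  */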
4870
4871static int
4872get_pointer_alignment (exp, max_align)
4873 tree exp;
4874 unsigned max_align;
4875{
4876 unsigned align, inner;
4877
4878 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4879 return 0;
4880
4881 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4882 align = MIN (align, max_align);
4883
4884 while (1)
4885 {
4886 switch (TREE_CODE (exp))
4887 {
4888 case NOP_EXPR:
4889 case CONVERT_EXPR:
4890 case NON_LVALUE_EXPR:
4891 exp = TREE_OPERAND (exp, 0);
4892 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4893 return align;
4894 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4895 inner = MIN (inner, max_align);
4896 align = MAX (align, inner);
4897 break;
4898
4899 case PLUS_EXPR:
4900 /* If sum of pointer + int, restrict our maximum alignment to that
4901 imposed by the integer. If not, we can't do any better than
4902 ALIGN. */
4903 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4904 return align;
4905
4906 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4907 & (max_align - 1))
4908 != 0)
4909 max_align >>= 1;
4910
4911 exp = TREE_OPERAND (exp, 0);
4912 break;
4913
4914 case ADDR_EXPR:
4915 /* See what we are pointing at and look at its alignment. */
4916 exp = TREE_OPERAND (exp, 0);
4917 if (TREE_CODE (exp) == FUNCTION_DECL)
4918 align = MAX (align, FUNCTION_BOUNDARY);
4919 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4920 align = MAX (align, DECL_ALIGN (exp));
4921#ifdef CONSTANT_ALIGNMENT
4922 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4923 align = CONSTANT_ALIGNMENT (exp, align);
4924#endif
4925 return MIN (align, max_align);
4926
4927 default:
4928 return align;
4929 }
4930 }
4931}
4932\f
4933/* Return the tree node and offset if a given argument corresponds to
4934 a string constant. */
4935
4936static tree
4937string_constant (arg, ptr_offset)
4938 tree arg;
4939 tree *ptr_offset;
4940{
4941 STRIP_NOPS (arg);
4942
4943 if (TREE_CODE (arg) == ADDR_EXPR
4944 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4945 {
4946 *ptr_offset = integer_zero_node;
4947 return TREE_OPERAND (arg, 0);
4948 }
4949 else if (TREE_CODE (arg) == PLUS_EXPR)
4950 {
4951 tree arg0 = TREE_OPERAND (arg, 0);
4952 tree arg1 = TREE_OPERAND (arg, 1);
4953
4954 STRIP_NOPS (arg0);
4955 STRIP_NOPS (arg1);
4956
4957 if (TREE_CODE (arg0) == ADDR_EXPR
4958 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4959 {
4960 *ptr_offset = arg1;
4961 return TREE_OPERAND (arg0, 0);
4962 }
4963 else if (TREE_CODE (arg1) == ADDR_EXPR
4964 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4965 {
4966 *ptr_offset = arg0;
4967 return TREE_OPERAND (arg1, 0);
4968 }
4969 }
4970
4971 return 0;
4972}
4973
4974/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4975 way, because the string could contain a zero byte in the middle.
4976 TREE_STRING_LENGTH is the size of the character array, not the string.
4977
4978 Unfortunately, string_constant can't access the values of const char
4979 arrays with initializers, so neither can we do so here. */
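	 /* Illustration (editor's sketch, not part of the original source):

		strlen ("foo\0bar")	folds here to 3, stopping at the
					first zero byte, whereas
					TREE_STRING_LENGTH would give the
					array size 8;
		strlen (s + 2)		with S a string constant and a
					constant offset just starts the
					host-side strlen search 2 bytes in.  */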
4980
4981static tree
4982c_strlen (src)
4983 tree src;
4984{
4985 tree offset_node;
4986 int offset, max;
4987 char *ptr;
4988
4989 src = string_constant (src, &offset_node);
4990 if (src == 0)
4991 return 0;
4992 max = TREE_STRING_LENGTH (src);
4993 ptr = TREE_STRING_POINTER (src);
4994 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4995 {
4996 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4997 compute the offset to the following null if we don't know where to
4998 start searching for it. */
4999 int i;
5000 for (i = 0; i < max; i++)
5001 if (ptr[i] == 0)
5002 return 0;
5003 /* We don't know the starting offset, but we do know that the string
5004 has no internal zero bytes. We can assume that the offset falls
5005 within the bounds of the string; otherwise, the programmer deserves
5006 what he gets. Subtract the offset from the length of the string,
5007 and return that. */
5008 /* This would perhaps not be valid if we were dealing with named
5009 arrays in addition to literal string constants. */
5010 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5011 }
5012
5013 /* We have a known offset into the string. Start searching there for
5014 a null character. */
5015 if (offset_node == 0)
5016 offset = 0;
5017 else
5018 {
5019 /* Did we get a long long offset? If so, punt. */
5020 if (TREE_INT_CST_HIGH (offset_node) != 0)
5021 return 0;
5022 offset = TREE_INT_CST_LOW (offset_node);
5023 }
5024 /* If the offset is known to be out of bounds, warn, and call strlen at
5025 runtime. */
5026 if (offset < 0 || offset > max)
5027 {
5028 warning ("offset outside bounds of constant string");
5029 return 0;
5030 }
5031 /* Use strlen to search for the first zero byte. Since any strings
5032 constructed with build_string will have nulls appended, we win even
5033 if we get handed something like (char[4])"abcd".
5034
5035 Since OFFSET is our starting index into the string, no further
5036 calculation is needed. */
5037 return size_int (strlen (ptr + offset));
5038}
5039\f
5040/* Expand an expression EXP that calls a built-in function,
5041 with result going to TARGET if that's convenient
5042 (and in mode MODE if that's convenient).
5043 SUBTARGET may be used as the target for computing one of EXP's operands.
5044 IGNORE is nonzero if the value is to be ignored. */
5045
5046static rtx
5047expand_builtin (exp, target, subtarget, mode, ignore)
5048 tree exp;
5049 rtx target;
5050 rtx subtarget;
5051 enum machine_mode mode;
5052 int ignore;
5053{
5054 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5055 tree arglist = TREE_OPERAND (exp, 1);
5056 rtx op0;
5057 rtx lab1, insns;
5058 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5059
5060 switch (DECL_FUNCTION_CODE (fndecl))
5061 {
5062 case BUILT_IN_ABS:
5063 case BUILT_IN_LABS:
5064 case BUILT_IN_FABS:
5065 /* build_function_call changes these into ABS_EXPR. */
5066 abort ();
5067
5068 case BUILT_IN_FSQRT:
5069 /* If not optimizing, call the library function. */
5070 if (! optimize)
5071 break;
5072
5073 if (arglist == 0
5074 /* Arg could be wrong type if user redeclared this fcn wrong. */
5075 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5076 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5077
5078 /* Stabilize and compute the argument. */
5079 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5080 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5081 {
5082 exp = copy_node (exp);
5083 arglist = copy_node (arglist);
5084 TREE_OPERAND (exp, 1) = arglist;
5085 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5086 }
5087 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5088
5089 /* Make a suitable register to place result in. */
5090 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5091
5092 emit_queue ();
5093 start_sequence ();
5094
5095 /* Compute sqrt into TARGET.
5096 Set TARGET to wherever the result comes back. */
5097 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5098 sqrt_optab, op0, target, 0);
5099
5100 /* If we were unable to expand via the builtin, stop the
5101 sequence (without outputting the insns) and break, causing
5102 a call to the library function. */
5103 if (target == 0)
5104 {
5105 end_sequence ();
5106 break;
5107 }
5108
5109 /* Check the results by default. But if flag_fast_math is turned on,
5110 then assume sqrt will always be called with valid arguments. */
5111
5112 if (! flag_fast_math)
5113 {
5114 /* Don't define the sqrt instructions
5115 if your machine is not IEEE. */
5116 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5117 abort ();
5118
5119 lab1 = gen_label_rtx ();
5120
5121 /* Test the result; if it is NaN, set errno=EDOM because
5122 the argument was not in the domain. */
5123 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5124 emit_jump_insn (gen_beq (lab1));
5125
5126#if TARGET_EDOM
5127 {
5128#ifdef GEN_ERRNO_RTX
5129 rtx errno_rtx = GEN_ERRNO_RTX;
5130#else
5131 rtx errno_rtx
5132 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5133#endif
5134
5135 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5136 }
5137#else
5138 /* We can't set errno=EDOM directly; let the library call do it.
5139 Pop the arguments right away in case the call gets deleted. */
5140 NO_DEFER_POP;
5141 expand_call (exp, target, 0);
5142 OK_DEFER_POP;
5143#endif
5144
5145 emit_label (lab1);
5146 }
5147
5148 /* Output the entire sequence. */
5149 insns = get_insns ();
5150 end_sequence ();
5151 emit_insns (insns);
5152
5153 return target;
5154
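/* Roughly, the checks emitted above amount to this C sketch (illustrative
 only; SQRT_INSN stands for the machine's square-root instruction, and a
 NaN is the only value that compares unequal to itself). */
#if 0
 double r = SQRT_INSN (x);
 if (r != r) /* result is NaN: x was out of the domain */
 r = sqrt (x); /* let the library call set errno = EDOM */
#endif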
5155 case BUILT_IN_SAVEREGS:
5156 /* Don't do __builtin_saveregs more than once in a function.
5157 Save the result of the first call and reuse it. */
5158 if (saveregs_value != 0)
5159 return saveregs_value;
5160 {
5161 /* When this function is called, it means that registers must be
5162 saved on entry to this function. So we migrate the
5163 call to the first insn of this function. */
5164 rtx temp;
5165 rtx seq;
5166 rtx valreg, saved_valreg;
5167
5168 /* Now really call the function. `expand_call' does not call
5169 expand_builtin, so there is no danger of infinite recursion here. */
5170 start_sequence ();
5171
5172#ifdef EXPAND_BUILTIN_SAVEREGS
5173 /* Do whatever the machine needs done in this case. */
5174 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5175#else
5176 /* The register where the function returns its value
5177 is likely to have something else in it, such as an argument.
5178 So preserve that register around the call. */
5179 if (value_mode != VOIDmode)
5180 {
5181 valreg = hard_libcall_value (value_mode);
5182 saved_valreg = gen_reg_rtx (value_mode);
5183 emit_move_insn (saved_valreg, valreg);
5184 }
5185
5186 /* Generate the call, putting the value in a pseudo. */
5187 temp = expand_call (exp, target, ignore);
5188
5189 if (value_mode != VOIDmode)
5190 emit_move_insn (valreg, saved_valreg);
5191#endif
5192
5193 seq = get_insns ();
5194 end_sequence ();
5195
5196 saveregs_value = temp;
5197
5198 /* This won't work inside a SEQUENCE--it really has to be
5199 at the start of the function. */
5200 if (in_sequence_p ())
5201 {
5202 /* Better to do this than to crash. */
5203 error ("`va_start' used within `({...})'");
5204 return temp;
5205 }
5206
5207 /* Put the sequence after the NOTE that starts the function. */
5208 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5209 return temp;
5210 }
5211
5212 /* __builtin_args_info (N) returns word N of the arg space info
5213 for the current function. The number and meanings of the words
5214 are controlled by the definition of CUMULATIVE_ARGS. */
5215 case BUILT_IN_ARGS_INFO:
5216 {
5217 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5218 int i;
5219 int *word_ptr = (int *) &current_function_args_info;
5220 tree type, elts, result;
5221
5222 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5223 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5224 __FILE__, __LINE__);
5225
5226 if (arglist != 0)
5227 {
5228 tree arg = TREE_VALUE (arglist);
5229 if (TREE_CODE (arg) != INTEGER_CST)
5230 error ("argument of __builtin_args_info must be constant");
5231 else
5232 {
5233 int wordnum = TREE_INT_CST_LOW (arg);
5234
5235 if (wordnum < 0 || wordnum >= nwords)
5236 error ("argument of __builtin_args_info out of range");
5237 else
5238 return GEN_INT (word_ptr[wordnum]);
5239 }
5240 }
5241 else
5242 error ("missing argument in __builtin_args_info");
5243
5244 return const0_rtx;
5245
5246#if 0
5247 for (i = 0; i < nwords; i++)
5248 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5249
5250 type = build_array_type (integer_type_node,
5251 build_index_type (build_int_2 (nwords, 0)));
5252 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5253 TREE_CONSTANT (result) = 1;
5254 TREE_STATIC (result) = 1;
5255 result = build (INDIRECT_REF, build_pointer_type (type), result);
5256 TREE_CONSTANT (result) = 1;
5257 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5258#endif
5259 }
5260
5261 /* Return the address of the first anonymous stack arg. */
5262 case BUILT_IN_NEXT_ARG:
5263 {
5264 tree fntype = TREE_TYPE (current_function_decl);
5265 if (!(TYPE_ARG_TYPES (fntype) != 0
5266 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5267 != void_type_node)))
5268 {
5269 error ("`va_start' used in function with fixed args");
5270 return const0_rtx;
5271 }
5272 }
5273
5274 return expand_binop (Pmode, add_optab,
5275 current_function_internal_arg_pointer,
5276 current_function_arg_offset_rtx,
5277 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5278
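/* Sketch of the source that exercises BUILT_IN_NEXT_ARG: va_start in a
 function whose prototype ends in `...'. A fixed-args function would hit
 the error above. The names below are made up. */
#if 0
#include <stdarg.h>

int
sum_ints (int n, ...)
{
 va_list ap;
 int total = 0;

 va_start (ap, n); /* may expand via __builtin_next_arg */
 while (n-- > 0)
 total += va_arg (ap, int);
 va_end (ap);
 return total;
}
#endif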
5279 case BUILT_IN_CLASSIFY_TYPE:
5280 if (arglist != 0)
5281 {
5282 tree type = TREE_TYPE (TREE_VALUE (arglist));
5283 enum tree_code code = TREE_CODE (type);
5284 if (code == VOID_TYPE)
5285 return GEN_INT (void_type_class);
5286 if (code == INTEGER_TYPE)
5287 return GEN_INT (integer_type_class);
5288 if (code == CHAR_TYPE)
5289 return GEN_INT (char_type_class);
5290 if (code == ENUMERAL_TYPE)
5291 return GEN_INT (enumeral_type_class);
5292 if (code == BOOLEAN_TYPE)
5293 return GEN_INT (boolean_type_class);
5294 if (code == POINTER_TYPE)
5295 return GEN_INT (pointer_type_class);
5296 if (code == REFERENCE_TYPE)
5297 return GEN_INT (reference_type_class);
5298 if (code == OFFSET_TYPE)
5299 return GEN_INT (offset_type_class);
5300 if (code == REAL_TYPE)
5301 return GEN_INT (real_type_class);
5302 if (code == COMPLEX_TYPE)
5303 return GEN_INT (complex_type_class);
5304 if (code == FUNCTION_TYPE)
5305 return GEN_INT (function_type_class);
5306 if (code == METHOD_TYPE)
5307 return GEN_INT (method_type_class);
5308 if (code == RECORD_TYPE)
5309 return GEN_INT (record_type_class);
5310 if (code == UNION_TYPE)
5311 return GEN_INT (union_type_class);
5312 if (code == ARRAY_TYPE)
5313 return GEN_INT (array_type_class);
5314 if (code == STRING_TYPE)
5315 return GEN_INT (string_type_class);
5316 if (code == SET_TYPE)
5317 return GEN_INT (set_type_class);
5318 if (code == FILE_TYPE)
5319 return GEN_INT (file_type_class);
5320 if (code == LANG_TYPE)
5321 return GEN_INT (lang_type_class);
5322 }
5323 return GEN_INT (no_type_class);
5324
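/* Illustrative foldings for the case above; each call becomes one of the
 *_type_class constants, whose numeric values come from typeclass.h. */
#if 0
int i = __builtin_classify_type (0); /* integer_type_class */
int r = __builtin_classify_type (0.0); /* real_type_class */
int p = __builtin_classify_type ((char *) 0); /* pointer_type_class */
#endif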
5325 case BUILT_IN_CONSTANT_P:
5326 if (arglist == 0)
5327 return const0_rtx;
5328 else
5329 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5330 ? const1_rtx : const0_rtx);
5331
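/* Illustrative foldings for the TREE_CODE_CLASS test above; X is a
 made-up variable. */
#if 0
extern int x;
int a = __builtin_constant_p (42); /* folds to 1: a literal constant */
int b = __builtin_constant_p (x); /* folds to 0: not a constant node */
#endif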
5332 case BUILT_IN_FRAME_ADDRESS:
5333 /* The argument must be a nonnegative integer constant.
5334 It counts the number of frames to scan up the stack.
5335 The value is the address of that frame. */
5336 case BUILT_IN_RETURN_ADDRESS:
5337 /* The argument must be a nonnegative integer constant.
5338 It counts the number of frames to scan up the stack.
5339 The value is the return address saved in that frame. */
5340 if (arglist == 0)
5341 /* Warning about missing arg was already issued. */
5342 return const0_rtx;
5343 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5344 {
5345 error ("invalid arg to __builtin_return_address");
5346 return const0_rtx;
5347 }
5348 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5349 {
5350 error ("invalid arg to __builtin_return_address");
5351 return const0_rtx;
5352 }
5353 else
5354 {
5355 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5356 rtx tem = frame_pointer_rtx;
5357 int i;
5358
5359 /* Scan back COUNT frames to the specified frame. */
5360 for (i = 0; i < count; i++)
5361 {
5362 /* Assume the dynamic chain pointer is in the word that
5363 the frame address points to, unless otherwise specified. */
5364#ifdef DYNAMIC_CHAIN_ADDRESS
5365 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5366#endif
5367 tem = memory_address (Pmode, tem);
5368 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5369 }
5370
5371 /* For __builtin_frame_address, return what we've got. */
5372 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5373 return tem;
5374
5375 /* For __builtin_return_address,
5376 get the return address from that frame. */
5377#ifdef RETURN_ADDR_RTX
5378 return RETURN_ADDR_RTX (count, tem);
5379#else
5380 tem = memory_address (Pmode,
5381 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5382 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5383#endif
5384 }
5385
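/* C sketch of the walk above; COUNT plays the role of the builtin's
 argument, and the offsets shown are only the defaults
 (DYNAMIC_CHAIN_ADDRESS and RETURN_ADDR_RTX override them on real
 targets). */
#if 0
 void *fp = __builtin_frame_address (0);
 int count = 2; /* made-up example argument */
 void *ra;

 while (count-- > 0)
 fp = *(void **) fp; /* chase one dynamic-chain link */
 ra = *((void **) fp + 1); /* default spot of the saved return address */
#endif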
5386 case BUILT_IN_ALLOCA:
5387 if (arglist == 0
5388 /* Arg could be non-integer if user redeclared this fcn wrong. */
5389 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5390 return const0_rtx;
5391 current_function_calls_alloca = 1;
5392 /* Compute the argument. */
5393 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5394
5395 /* Allocate the desired space. */
5396 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5397
5398 /* Record the new stack level for nonlocal gotos. */
5399 if (nonlocal_goto_handler_slot != 0)
5400 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5401 return target;
5402
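/* Source-level view of the case above (sketch; N is made up): the space
 comes out of the current frame and vanishes when the function returns,
 so no free is ever emitted. */
#if 0
 char *buf = (char *) __builtin_alloca (n);
#endif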
5403 case BUILT_IN_FFS:
5404 /* If not optimizing, call the library function. */
5405 if (!optimize)
5406 break;
5407
5408 if (arglist == 0
5409 /* Arg could be non-integer if user redeclared this fcn wrong. */
5410 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5411 return const0_rtx;
5412
5413 /* Compute the argument. */
5414 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5415 /* Compute ffs, into TARGET if possible.
5416 Set TARGET to wherever the result comes back. */
5417 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5418 ffs_optab, op0, target, 1);
5419 if (target == 0)
5420 abort ();
5421 return target;
5422
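/* The semantics being open-coded by the ffs_optab expansion above: ffs
 returns 1 plus the index of the least significant set bit, or 0 for a
 zero argument. Illustrative values: */
#if 0
 int a = ffs (0); /* 0 */
 int b = ffs (1); /* 1 */
 int c = ffs (0x28); /* 4: bit 3 is the lowest bit set in 101000 */
#endif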
5423 case BUILT_IN_STRLEN:
5424 /* If not optimizing, call the library function. */
5425 if (!optimize)
5426 break;
5427
5428 if (arglist == 0
5429 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5430 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5431 return const0_rtx;
5432 else
5433 {
5434 tree src = TREE_VALUE (arglist);
5435 tree len = c_strlen (src);
5436
5437 int align
5438 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5439
5440 rtx result, src_rtx, char_rtx;
5441 enum machine_mode insn_mode = value_mode, char_mode;
5442 enum insn_code icode;
5443
5444 /* If the length is known, just return it. */
5445 if (len != 0)
5446 return expand_expr (len, target, mode, 0);
5447
5448 /* If SRC is not a pointer type, don't do this operation inline. */
5449 if (align == 0)
5450 break;
5451
5452 /* Call a function if we can't compute strlen in the right mode. */
5453
5454 while (insn_mode != VOIDmode)
5455 {
5456 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5457 if (icode != CODE_FOR_nothing)
5458 break;
5459
5460 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5461 }
5462 if (insn_mode == VOIDmode)
5463 break;
5464
5465 /* Make a place to write the result of the instruction. */
5466 result = target;
5467 if (! (result != 0
5468 && GET_CODE (result) == REG
5469 && GET_MODE (result) == insn_mode
5470 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5471 result = gen_reg_rtx (insn_mode);
5472
5473 /* Make sure the operands are acceptable to the predicates. */
5474
5475 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5476 result = gen_reg_rtx (insn_mode);
5477
5478 src_rtx = memory_address (BLKmode,
5479 expand_expr (src, NULL_RTX, Pmode,
5480 EXPAND_NORMAL));
5481 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5482 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5483
5484 char_rtx = const0_rtx;
5485 char_mode = insn_operand_mode[(int)icode][2];
5486 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
e7c33f54
RK
5487 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5488
5489 emit_insn (GEN_FCN (icode) (result,
5490 gen_rtx (MEM, BLKmode, src_rtx),
5491 char_rtx, GEN_INT (align)));
5492
5493 /* Return the value in the proper mode for this function. */
5494 if (GET_MODE (result) == value_mode)
5495 return result;
5496 else if (target != 0)
5497 {
5498 convert_move (target, result, 0);
5499 return target;
5500 }
5501 else
5502 return convert_to_mode (value_mode, result, 0);
5503 }
5504
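/* The two inline paths above, at source level (sketch; assumes
 <string.h> and a made-up pointer P): */
#if 0
 int a = strlen ("abcd"); /* c_strlen folds this to the constant 4 */
 int b = strlen (p); /* emits the strlen insn in a suitable
 mode, or falls back to the library */
#endif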
5505 case BUILT_IN_STRCPY:
5506 /* If not optimizing, call the library function. */
5507 if (!optimize)
5508 break;
5509
5510 if (arglist == 0
5511 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5512 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5513 || TREE_CHAIN (arglist) == 0
5514 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5515 return const0_rtx;
5516 else
5517 {
5518 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5519
5520 if (len == 0)
5521 break;
5522
5523 len = size_binop (PLUS_EXPR, len, integer_one_node);
5524
5525 chainon (arglist, build_tree_list (NULL_TREE, len));
5526 }
5527
5528 /* Falls through to the memcpy case. */
5529 case BUILT_IN_MEMCPY:
5530 /* If not optimizing, call the library function. */
5531 if (!optimize)
5532 break;
5533
5534 if (arglist == 0
5535 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5536 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5537 || TREE_CHAIN (arglist) == 0
5538 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5539 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5540 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5541 return const0_rtx;
5542 else
5543 {
5544 tree dest = TREE_VALUE (arglist);
5545 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5546 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5547
5548 int src_align
5549 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5550 int dest_align
5551 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5552 rtx dest_rtx;
5553
5554 /* If either SRC or DEST is not a pointer type, don't do
5555 this operation in-line. */
5556 if (src_align == 0 || dest_align == 0)
5557 {
5558 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5559 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5560 break;
5561 }
5562
5563 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5564
5565 /* Copy word part most expediently. */
5566 emit_block_move (gen_rtx (MEM, BLKmode,
5567 memory_address (BLKmode, dest_rtx)),
5568 gen_rtx (MEM, BLKmode,
5569 memory_address (BLKmode,
5570 expand_expr (src, NULL_RTX,
5571 Pmode,
5572 EXPAND_NORMAL))),
5573 expand_expr (len, NULL_RTX, VOIDmode, 0),
5574 MIN (src_align, dest_align));
5575 return dest_rtx;
5576 }
5577
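/* What the strcpy transformation above amounts to, at source level
 (sketch; DST is made up): the length of the constant source, plus one
 for the terminating null, turns the copy into a fixed-size block move. */
#if 0
 strcpy (dst, "abc"); /* treated like memcpy (dst, "abc", 4) */
#endif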
5578/* These comparison functions need an instruction that returns an actual
5579 index. An ordinary compare that just sets the condition codes
5580 is not enough. */
5581#ifdef HAVE_cmpstrsi
5582 case BUILT_IN_STRCMP:
5583 /* If not optimizing, call the library function. */
5584 if (!optimize)
5585 break;
5586
5587 if (arglist == 0
5588 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5589 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5590 || TREE_CHAIN (arglist) == 0
5591 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5592 return const0_rtx;
5593 else if (!HAVE_cmpstrsi)
5594 break;
5595 {
5596 tree arg1 = TREE_VALUE (arglist);
5597 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5598 tree offset;
5599 tree len, len2;
5600
5601 len = c_strlen (arg1);
5602 if (len)
5603 len = size_binop (PLUS_EXPR, integer_one_node, len);
5604 len2 = c_strlen (arg2);
5605 if (len2)
5606 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5607
5608 /* If we don't have a constant length for the first, use the length
5609 of the second, if we know it. We don't require a constant for
5610 this case; some cost analysis could be done if both are available
5611 but neither is constant. For now, assume they're equally cheap.
5612
5613 If both strings have constant lengths, use the smaller. This
5614 could arise if optimization results in strcmp being called with
5615 two fixed strings, or if the code was machine-generated. We should
5616 add some code to the `memcmp' handler below to deal with such
5617 situations, someday. */
5618 if (!len || TREE_CODE (len) != INTEGER_CST)
5619 {
5620 if (len2)
5621 len = len2;
5622 else if (len == 0)
5623 break;
5624 }
5625 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5626 {
5627 if (tree_int_cst_lt (len2, len))
5628 len = len2;
5629 }
5630
5631 chainon (arglist, build_tree_list (NULL_TREE, len));
5632 }
5633
5634 /* Falls through to the memcmp case. */
5635 case BUILT_IN_MEMCMP:
5636 /* If not optimizing, call the library function. */
5637 if (!optimize)
5638 break;
5639
5640 if (arglist == 0
5641 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5642 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5643 || TREE_CHAIN (arglist) == 0
5644 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5645 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5646 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5647 return const0_rtx;
5648 else if (!HAVE_cmpstrsi)
5649 break;
5650 {
5651 tree arg1 = TREE_VALUE (arglist);
5652 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5653 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5654 rtx result;
5655
5656 int arg1_align
5657 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5658 int arg2_align
5659 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5660 enum machine_mode insn_mode
5661 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5662
5663 /* If we don't have POINTER_TYPE, call the function. */
5664 if (arg1_align == 0 || arg2_align == 0)
5665 {
5666 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5667 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5668 break;
5669 }
5670
5671 /* Make a place to write the result of the instruction. */
5672 result = target;
5673 if (! (result != 0
5674 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5675 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5676 result = gen_reg_rtx (insn_mode);
5677
5678 emit_insn (gen_cmpstrsi (result,
5679 gen_rtx (MEM, BLKmode,
5680 expand_expr (arg1, NULL_RTX, Pmode,
5681 EXPAND_NORMAL)),
5682 gen_rtx (MEM, BLKmode,
5683 expand_expr (arg2, NULL_RTX, Pmode,
5684 EXPAND_NORMAL)),
5685 expand_expr (len, NULL_RTX, VOIDmode, 0),
5686 GEN_INT (MIN (arg1_align, arg2_align))));
5687
5688 /* Return the value in the proper mode for this function. */
5689 mode = TYPE_MODE (TREE_TYPE (exp));
5690 if (GET_MODE (result) == mode)
5691 return result;
5692 else if (target != 0)
5693 {
5694 convert_move (target, result, 0);
5695 return target;
5696 }
5697 else
5698 return convert_to_mode (mode, result, 0);
5699 }
5700#else
5701 case BUILT_IN_STRCMP:
5702 case BUILT_IN_MEMCMP:
5703 break;
5704#endif
5705
5706 default: /* just do library call, if unknown builtin */
5707 error ("built-in function %s not currently supported",
5708 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5709 }
5710
5711 /* The switch statement above can drop through to cause the function
5712 to be called normally. */
5713
5714 return expand_call (exp, target, ignore);
5715}
5716\f
5717/* Expand code for a post- or pre- increment or decrement
5718 and return the RTX for the result.
5719 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
5720
5721static rtx
5722expand_increment (exp, post)
5723 register tree exp;
5724 int post;
5725{
5726 register rtx op0, op1;
5727 register rtx temp, value;
5728 register tree incremented = TREE_OPERAND (exp, 0);
5729 optab this_optab = add_optab;
5730 int icode;
5731 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5732 int op0_is_copy = 0;
5733
5734 /* Stabilize any component ref that might need to be
5735 evaluated more than once below. */
5736 if (TREE_CODE (incremented) == BIT_FIELD_REF
5737 || (TREE_CODE (incremented) == COMPONENT_REF
5738 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5739 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5740 incremented = stabilize_reference (incremented);
5741
5742 /* Compute the operands as RTX.
5743 Note whether OP0 is the actual lvalue or a copy of it:
5744 I believe it is a copy iff it is a register or subreg
5745 and insns were generated in computing it. */
5746 temp = get_last_insn ();
5747 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
5748 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
5749 && temp != get_last_insn ());
5750 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5751
5752 /* Decide whether incrementing or decrementing. */
5753 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5754 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5755 this_optab = sub_optab;
5756
5757 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5758 then we cannot just increment OP0. We must
5759 therefore contrive to increment the original value.
5760 Then we can return OP0 since it is a copy of the old value. */
5761 if (op0_is_copy)
5762 {
5763 /* This is the easiest way to increment the value wherever it is.
5764 Problems with multiple evaluation of INCREMENTED
5765 are prevented because either (1) it is a component_ref,
5766 in which case it was stabilized above, or (2) it is an array_ref
5767 with constant index in an array in a register, which is
5768 safe to reevaluate. */
5769 tree newexp = build ((this_optab == add_optab
5770 ? PLUS_EXPR : MINUS_EXPR),
5771 TREE_TYPE (exp),
5772 incremented,
5773 TREE_OPERAND (exp, 1));
5774 temp = expand_assignment (incremented, newexp, ! post, 0);
5775 return post ? op0 : temp;
5776 }
5777
5778 /* Convert decrement by a constant into a negative increment. */
5779 if (this_optab == sub_optab
5780 && GET_CODE (op1) == CONST_INT)
5781 {
5782 op1 = GEN_INT (- INTVAL (op1));
5783 this_optab = add_optab;
5784 }
5785
5786 if (post)
5787 {
5788 /* We have a true reference to the value in OP0.
5789 If there is an insn to add or subtract in this mode, queue it. */
5790
5791#if 0 /* Turned off to avoid making extra insn for indexed memref. */
5792 op0 = stabilize (op0);
5793#endif
5794
5795 icode = (int) this_optab->handlers[(int) mode].insn_code;
5796 if (icode != (int) CODE_FOR_nothing
5797 /* Make sure that OP0 is valid for operands 0 and 1
5798 of the insn we want to queue. */
5799 && (*insn_operand_predicate[icode][0]) (op0, mode)
5800 && (*insn_operand_predicate[icode][1]) (op0, mode))
5801 {
5802 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5803 op1 = force_reg (mode, op1);
5804
5805 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5806 }
5807 }
5808
5809 /* Preincrement, or we can't increment with one simple insn. */
5810 if (post)
5811 /* Save a copy of the value before inc or dec, to return it later. */
5812 temp = value = copy_to_reg (op0);
5813 else
5814 /* Arrange to return the incremented value. */
5815 /* Copy the rtx because expand_binop will protect from the queue,
5816 and the results of that would be invalid for us to return
5817 if our caller does emit_queue before using our result. */
5818 temp = copy_rtx (value = op0);
5819
5820 /* Increment however we can. */
5821 op1 = expand_binop (mode, this_optab, value, op1, op0,
5822 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5823 /* Make sure the value is stored into OP0. */
5824 if (op1 != op0)
5825 emit_move_insn (op0, op1);
5826
5827 return temp;
5828}
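/* The observable semantics preserved above (sketch): a postincrement
 yields the old value, so a copy is saved in TEMP before the add when
 the increment can't be queued as a single insn. */
#if 0
 int i = 5;
 int j = i++; /* j == 5, i == 6: the saved copy is returned */
 int k = ++i; /* k == 7: preincrement returns the new value */
#endif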
5829\f
5830/* Expand all function calls contained within EXP, innermost ones first.
5831 But don't look within expressions that have sequence points.
5832 For each CALL_EXPR, record the rtx for its value
5833 in the CALL_EXPR_RTL field. */
5834
5835static void
5836preexpand_calls (exp)
5837 tree exp;
5838{
5839 register int nops, i;
5840 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5841
5842 if (! do_preexpand_calls)
5843 return;
5844
5845 /* Only expressions and references can contain calls. */
5846
5847 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5848 return;
5849
5850 switch (TREE_CODE (exp))
5851 {
5852 case CALL_EXPR:
5853 /* Do nothing if already expanded. */
5854 if (CALL_EXPR_RTL (exp) != 0)
5855 return;
5856
5857 /* Do nothing to built-in functions. */
5858 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5859 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5860 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5861 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
5862 return;
5863
5864 case COMPOUND_EXPR:
5865 case COND_EXPR:
5866 case TRUTH_ANDIF_EXPR:
5867 case TRUTH_ORIF_EXPR:
5868 /* If we find one of these, then we can be sure
5869 the adjust will be done for it (since it makes jumps).
5870 Do it now, so that if this is inside an argument
5871 of a function, we don't get the stack adjustment
5872 after some other args have already been pushed. */
5873 do_pending_stack_adjust ();
5874 return;
5875
5876 case BLOCK:
5877 case RTL_EXPR:
5878 case WITH_CLEANUP_EXPR:
5879 return;
5880
5881 case SAVE_EXPR:
5882 if (SAVE_EXPR_RTL (exp) != 0)
5883 return;
5884 }
5885
5886 nops = tree_code_length[(int) TREE_CODE (exp)];
5887 for (i = 0; i < nops; i++)
5888 if (TREE_OPERAND (exp, i) != 0)
5889 {
5890 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5891 if (type == 'e' || type == '<' || type == '1' || type == '2'
5892 || type == 'r')
5893 preexpand_calls (TREE_OPERAND (exp, i));
5894 }
5895}
5896\f
5897/* At the start of a function, record that we have no previously-pushed
5898 arguments waiting to be popped. */
5899
5900void
5901init_pending_stack_adjust ()
5902{
5903 pending_stack_adjust = 0;
5904}
5905
5906/* When exiting from function, if safe, clear out any pending stack adjust
5907 so the adjustment won't get done. */
5908
5909void
5910clear_pending_stack_adjust ()
5911{
5912#ifdef EXIT_IGNORE_STACK
5913 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5914 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
5915 && ! flag_inline_functions)
5916 pending_stack_adjust = 0;
5917#endif
5918}
5919
5920/* Pop any previously-pushed arguments that have not been popped yet. */
5921
5922void
5923do_pending_stack_adjust ()
5924{
5925 if (inhibit_defer_pop == 0)
5926 {
5927 if (pending_stack_adjust != 0)
5928 adjust_stack (GEN_INT (pending_stack_adjust));
5929 pending_stack_adjust = 0;
5930 }
5931}
5932
5933/* Expand all cleanups up to OLD_CLEANUPS.
5934 Needed here, and also for language-dependent calls. */
5935
5936void
5937expand_cleanups_to (old_cleanups)
5938 tree old_cleanups;
5939{
5940 while (cleanups_this_call != old_cleanups)
5941 {
5942 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
5943 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5944 }
5945}
5946\f
5947/* Expand conditional expressions. */
5948
5949/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5950 LABEL is an rtx of code CODE_LABEL, in this function and all the
5951 functions here. */
5952
5953void
5954jumpifnot (exp, label)
5955 tree exp;
5956 rtx label;
5957{
5958 do_jump (exp, label, NULL_RTX);
5959}
5960
5961/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5962
5963void
5964jumpif (exp, label)
5965 tree exp;
5966 rtx label;
5967{
5968 do_jump (exp, NULL_RTX, label);
5969}
5970
5971/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5972 the result is zero, or IF_TRUE_LABEL if the result is one.
5973 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5974 meaning fall through in that case.
5975
5976 do_jump always does any pending stack adjust except when it does not
5977 actually perform a jump. An example where there is no jump
5978 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5979
5980 This function is responsible for optimizing cases such as
5981 &&, || and comparison operators in EXP. */
5982
5983void
5984do_jump (exp, if_false_label, if_true_label)
5985 tree exp;
5986 rtx if_false_label, if_true_label;
5987{
5988 register enum tree_code code = TREE_CODE (exp);
5989 /* Some cases need to create a label to jump to
5990 in order to properly fall through.
5991 These cases set DROP_THROUGH_LABEL nonzero. */
5992 rtx drop_through_label = 0;
5993 rtx temp;
5994 rtx comparison = 0;
5995 int i;
5996 tree type;
5997
5998 emit_queue ();
5999
6000 switch (code)
6001 {
6002 case ERROR_MARK:
6003 break;
6004
6005 case INTEGER_CST:
6006 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6007 if (temp)
6008 emit_jump (temp);
6009 break;
6010
6011#if 0
6012 /* This is not true with #pragma weak */
6013 case ADDR_EXPR:
6014 /* The address of something can never be zero. */
6015 if (if_true_label)
6016 emit_jump (if_true_label);
6017 break;
6018#endif
6019
6020 case NOP_EXPR:
6021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6022 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6023 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6024 goto normal;
6025 case CONVERT_EXPR:
6026 /* If we are narrowing the operand, we have to do the compare in the
6027 narrower mode. */
6028 if ((TYPE_PRECISION (TREE_TYPE (exp))
6029 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6030 goto normal;
6031 case NON_LVALUE_EXPR:
6032 case REFERENCE_EXPR:
6033 case ABS_EXPR:
6034 case NEGATE_EXPR:
6035 case LROTATE_EXPR:
6036 case RROTATE_EXPR:
6037 /* These cannot change zero->non-zero or vice versa. */
6038 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6039 break;
6040
6041#if 0
6042 /* This is never less insns than evaluating the PLUS_EXPR followed by
6043 a test and can be longer if the test is eliminated. */
6044 case PLUS_EXPR:
6045 /* Reduce to minus. */
6046 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6047 TREE_OPERAND (exp, 0),
6048 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6049 TREE_OPERAND (exp, 1))));
6050 /* Process as MINUS. */
6051#endif
6052
6053 case MINUS_EXPR:
6054 /* Non-zero iff operands of minus differ. */
6055 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6056 TREE_OPERAND (exp, 0),
6057 TREE_OPERAND (exp, 1)),
6058 NE, NE);
6059 break;
6060
6061 case BIT_AND_EXPR:
6062 /* If we are AND'ing with a small constant, do this comparison in the
6063 smallest type that fits. If the machine doesn't have comparisons
6064 that small, it will be converted back to the wider comparison.
6065 This helps if we are testing the sign bit of a narrower object.
6066 combine can't do this for us because it can't know whether a
6067 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6068
6069 if (! SLOW_BYTE_ACCESS
6070 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6071 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6072 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6073 && (type = type_for_size (i + 1, 1)) != 0
6074 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6075 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6076 != CODE_FOR_nothing))
6077 {
6078 do_jump (convert (type, exp), if_false_label, if_true_label);
6079 break;
6080 }
6081 goto normal;
6082
6083 case TRUTH_NOT_EXPR:
6084 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6085 break;
6086
6087 case TRUTH_ANDIF_EXPR:
6088 if (if_false_label == 0)
6089 if_false_label = drop_through_label = gen_label_rtx ();
6090 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6091 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6092 break;
6093
6094 case TRUTH_ORIF_EXPR:
6095 if (if_true_label == 0)
6096 if_true_label = drop_through_label = gen_label_rtx ();
6097 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6098 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6099 break;
6100
6101 case COMPOUND_EXPR:
6102 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6103 free_temp_slots ();
6104 emit_queue ();
6105 do_pending_stack_adjust ();
6106 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6107 break;
6108
6109 case COMPONENT_REF:
6110 case BIT_FIELD_REF:
6111 case ARRAY_REF:
6112 {
6113 int bitsize, bitpos, unsignedp;
6114 enum machine_mode mode;
6115 tree type;
6116 tree offset;
6117 int volatilep = 0;
6118
6119 /* Get description of this reference. We don't actually care
6120 about the underlying object here. */
6121 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6122 &mode, &unsignedp, &volatilep);
6123
6124 type = type_for_size (bitsize, unsignedp);
6125 if (! SLOW_BYTE_ACCESS
6126 && type != 0 && bitsize >= 0
6127 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6128 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6129 != CODE_FOR_nothing))
6130 {
6131 do_jump (convert (type, exp), if_false_label, if_true_label);
6132 break;
6133 }
6134 goto normal;
6135 }
6136
6137 case COND_EXPR:
6138 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6139 if (integer_onep (TREE_OPERAND (exp, 1))
6140 && integer_zerop (TREE_OPERAND (exp, 2)))
6141 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6142
6143 else if (integer_zerop (TREE_OPERAND (exp, 1))
6144 && integer_onep (TREE_OPERAND (exp, 2)))
6145 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6146
6147 else
6148 {
6149 register rtx label1 = gen_label_rtx ();
6150 drop_through_label = gen_label_rtx ();
6151 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6152 /* Now the THEN-expression. */
6153 do_jump (TREE_OPERAND (exp, 1),
6154 if_false_label ? if_false_label : drop_through_label,
6155 if_true_label ? if_true_label : drop_through_label);
6156 /* In case the do_jump just above never jumps. */
6157 do_pending_stack_adjust ();
6158 emit_label (label1);
6159 /* Now the ELSE-expression. */
6160 do_jump (TREE_OPERAND (exp, 2),
6161 if_false_label ? if_false_label : drop_through_label,
6162 if_true_label ? if_true_label : drop_through_label);
6163 }
6164 break;
6165
6166 case EQ_EXPR:
6167 if (integer_zerop (TREE_OPERAND (exp, 1)))
6168 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6169 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6170 == MODE_INT)
6171 &&
6172 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6173 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6174 else
6175 comparison = compare (exp, EQ, EQ);
6176 break;
6177
6178 case NE_EXPR:
6179 if (integer_zerop (TREE_OPERAND (exp, 1)))
6180 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6181 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6182 == MODE_INT)
6183 &&
6184 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6185 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6186 else
6187 comparison = compare (exp, NE, NE);
6188 break;
6189
6190 case LT_EXPR:
6191 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6192 == MODE_INT)
6193 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6194 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6195 else
6196 comparison = compare (exp, LT, LTU);
6197 break;
6198
6199 case LE_EXPR:
6200 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6201 == MODE_INT)
6202 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6203 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6204 else
6205 comparison = compare (exp, LE, LEU);
6206 break;
6207
6208 case GT_EXPR:
6209 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6210 == MODE_INT)
6211 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6212 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6213 else
6214 comparison = compare (exp, GT, GTU);
6215 break;
6216
6217 case GE_EXPR:
6218 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6219 == MODE_INT)
6220 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6221 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6222 else
6223 comparison = compare (exp, GE, GEU);
6224 break;
6225
6226 default:
6227 normal:
6228 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6229#if 0
6230 /* This is not needed any more and causes poor code since it causes
6231 comparisons and tests from non-SI objects to have different code
6232 sequences. */
6233 /* Copy to register to avoid generating bad insns by cse
6234 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6235 if (!cse_not_expected && GET_CODE (temp) == MEM)
6236 temp = copy_to_reg (temp);
6237#endif
6238 do_pending_stack_adjust ();
6239 if (GET_CODE (temp) == CONST_INT)
6240 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6241 else if (GET_CODE (temp) == LABEL_REF)
6242 comparison = const_true_rtx;
6243 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6244 && !can_compare_p (GET_MODE (temp)))
6245 /* Note swapping the labels gives us not-equal. */
6246 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6247 else if (GET_MODE (temp) != VOIDmode)
6248 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6249 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6250 GET_MODE (temp), NULL_RTX, 0);
6251 else
6252 abort ();
6253 }
6254
6255 /* Do any postincrements in the expression that was tested. */
6256 emit_queue ();
6257
6258 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6259 straight into a conditional jump instruction as the jump condition.
6260 Otherwise, all the work has been done already. */
6261
6262 if (comparison == const_true_rtx)
6263 {
6264 if (if_true_label)
6265 emit_jump (if_true_label);
6266 }
6267 else if (comparison == const0_rtx)
6268 {
6269 if (if_false_label)
6270 emit_jump (if_false_label);
6271 }
6272 else if (comparison)
6273 do_jump_for_compare (comparison, if_false_label, if_true_label);
6274
6275 free_temp_slots ();
6276
6277 if (drop_through_label)
6278 {
6279 /* If do_jump produces code that might be jumped around,
6280 do any stack adjusts from that code, before the place
6281 where control merges in. */
6282 do_pending_stack_adjust ();
6283 emit_label (drop_through_label);
6284 }
6285}
6286\f
6287/* Given a comparison expression EXP for values too wide to be compared
6288 with one insn, test the comparison and jump to the appropriate label.
6289 The code of EXP is ignored; we always test GT if SWAP is 0,
6290 and LT if SWAP is 1. */
6291
6292static void
6293do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6294 tree exp;
6295 int swap;
6296 rtx if_false_label, if_true_label;
6297{
6298 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6299 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6300 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6301 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6302 rtx drop_through_label = 0;
6303 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6304 int i;
6305
6306 if (! if_true_label || ! if_false_label)
6307 drop_through_label = gen_label_rtx ();
6308 if (! if_true_label)
6309 if_true_label = drop_through_label;
6310 if (! if_false_label)
6311 if_false_label = drop_through_label;
6312
6313 /* Compare a word at a time, high order first. */
6314 for (i = 0; i < nwords; i++)
6315 {
6316 rtx comp;
6317 rtx op0_word, op1_word;
6318
6319 if (WORDS_BIG_ENDIAN)
6320 {
6321 op0_word = operand_subword_force (op0, i, mode);
6322 op1_word = operand_subword_force (op1, i, mode);
6323 }
6324 else
6325 {
6326 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6327 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6328 }
6329
6330 /* All but high-order word must be compared as unsigned. */
6331 comp = compare_from_rtx (op0_word, op1_word,
6332 (unsignedp || i > 0) ? GTU : GT,
6333 unsignedp, word_mode, NULL_RTX, 0);
6334 if (comp == const_true_rtx)
6335 emit_jump (if_true_label);
6336 else if (comp != const0_rtx)
6337 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6338
6339 /* Consider lower words only if these are equal. */
6340 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6341 NULL_RTX, 0);
6342 if (comp == const_true_rtx)
6343 emit_jump (if_false_label);
6344 else if (comp != const0_rtx)
6345 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6346 }
6347
6348 if (if_false_label)
6349 emit_jump (if_false_label);
6350 if (drop_through_label)
6351 emit_label (drop_through_label);
6352}
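/* C sketch of the strategy above for a two-word unsigned comparison
 (A_HI, A_LO, etc. are made-up names): test high-order words first, and
 look at lower words only when the higher ones are equal. */
#if 0
 if (a_hi > b_hi)
 goto if_true;
 if (a_hi != b_hi) /* high words differ, so a < b */
 goto if_false;
 if (a_lo > b_lo) /* all but the high word compare unsigned */
 goto if_true;
 goto if_false;
#endif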
6353
6354/* Given an EQ_EXPR expression EXP for values too wide to be compared
6355 with one insn, test the comparison and jump to the appropriate label. */
6356
6357static void
6358do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6359 tree exp;
6360 rtx if_false_label, if_true_label;
6361{
6362 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6363 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6364 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6365 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6366 int i;
6367 rtx drop_through_label = 0;
6368
6369 if (! if_false_label)
6370 drop_through_label = if_false_label = gen_label_rtx ();
6371
6372 for (i = 0; i < nwords; i++)
6373 {
6374 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6375 operand_subword_force (op1, i, mode),
6376 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6377 word_mode, NULL_RTX, 0);
6378 if (comp == const_true_rtx)
6379 emit_jump (if_false_label);
6380 else if (comp != const0_rtx)
6381 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6382 }
6383
6384 if (if_true_label)
6385 emit_jump (if_true_label);
6386 if (drop_through_label)
6387 emit_label (drop_through_label);
6388}
6389\f
6390/* Jump according to whether OP0 is 0.
6391 We assume that OP0 has an integer mode that is too wide
6392 for the available compare insns. */
6393
6394static void
6395do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6396 rtx op0;
6397 rtx if_false_label, if_true_label;
6398{
6399 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6400 int i;
6401 rtx drop_through_label = 0;
6402
6403 if (! if_false_label)
6404 drop_through_label = if_false_label = gen_label_rtx ();
6405
6406 for (i = 0; i < nwords; i++)
6407 {
6408 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6409 GET_MODE (op0)),
6410 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6411 if (comp == const_true_rtx)
6412 emit_jump (if_false_label);
6413 else if (comp != const0_rtx)
6414 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6415 }
6416
6417 if (if_true_label)
6418 emit_jump (if_true_label);
6419 if (drop_through_label)
6420 emit_label (drop_through_label);
6421}
6422
6423/* Given a comparison expression in rtl form, output conditional branches to
6424 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6425
6426static void
6427do_jump_for_compare (comparison, if_false_label, if_true_label)
6428 rtx comparison, if_false_label, if_true_label;
6429{
6430 if (if_true_label)
6431 {
6432 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6433 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6434 else
6435 abort ();
6436
6437 if (if_false_label)
6438 emit_jump (if_false_label);
6439 }
6440 else if (if_false_label)
6441 {
6442 rtx insn;
6443 rtx prev = PREV_INSN (get_last_insn ());
6444 rtx branch = 0;
6445
6446 /* Output the branch with the opposite condition. Then try to invert
6447 what is generated. If more than one insn is a branch, or if the
6448 branch is not the last insn written, abort. If we can't invert
6449 the branch, emit make a true label, redirect this jump to that,
6450 emit a jump to the false label and define the true label. */
6451
6452 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6453 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6454 else
6455 abort ();
6456
6457 /* Here we get the insn before what was just emitted.
6458 On some machines, emitting the branch can discard
6459 the previous compare insn and emit a replacement. */
6460 if (prev == 0)
6461 /* If there's only one preceding insn... */
6462 insn = get_insns ();
6463 else
6464 insn = NEXT_INSN (prev);
6465
6466 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6467 if (GET_CODE (insn) == JUMP_INSN)
6468 {
6469 if (branch)
6470 abort ();
6471 branch = insn;
6472 }
6473
6474 if (branch != get_last_insn ())
6475 abort ();
6476
6477 if (! invert_jump (branch, if_false_label))
6478 {
6479 if_true_label = gen_label_rtx ();
6480 redirect_jump (branch, if_true_label);
6481 emit_jump (if_false_label);
6482 emit_label (if_true_label);
6483 }
6484 }
6485}
6486\f
6487/* Generate code for a comparison expression EXP
6488 (including code to compute the values to be compared)
6489 and set (CC0) according to the result.
6490 SIGNED_CODE should be the rtx operation for this comparison for
6491 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6492
6493 We force a stack adjustment unless there are currently
6494 things pushed on the stack that aren't yet used. */
6495
6496static rtx
6497compare (exp, signed_code, unsigned_code)
6498 register tree exp;
6499 enum rtx_code signed_code, unsigned_code;
6500{
6501 register rtx op0
6502 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6503 register rtx op1
6504 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6505 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6506 register enum machine_mode mode = TYPE_MODE (type);
6507 int unsignedp = TREE_UNSIGNED (type);
6508 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6509
6510 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6511 ((mode == BLKmode)
6512 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6513 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6514}
6515
6516/* Like compare but expects the values to compare as two rtx's.
6517 The decision as to signed or unsigned comparison must be made by the caller.
6518
6519 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6520 compared.
6521
6522 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6523 size of MODE should be used. */
6524
6525rtx
6526compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6527 register rtx op0, op1;
6528 enum rtx_code code;
6529 int unsignedp;
6530 enum machine_mode mode;
6531 rtx size;
6532 int align;
6533{
6534 /* If one operand is constant, make it the second one. */
6535
6536 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6537 {
6538 rtx tem = op0;
6539 op0 = op1;
6540 op1 = tem;
6541 code = swap_condition (code);
6542 }
6543
6544 if (flag_force_mem)
6545 {
6546 op0 = force_not_mem (op0);
6547 op1 = force_not_mem (op1);
6548 }
6549
6550 do_pending_stack_adjust ();
6551
6552 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6553 return simplify_relational_operation (code, mode, op0, op1);
6554
6555#if 0
6556 /* There's no need to do this now that combine.c can eliminate lots of
6557 sign extensions. This can be less efficient in certain cases on other
6558 machines.
6559
6560 /* If this is a signed equality comparison, we can do it as an
6561 unsigned comparison since zero-extension is cheaper than sign
6562 extension and comparisons with zero are done as unsigned. This is
6563 the case even on machines that can do fast sign extension, since
6564 zero-extension is easier to combine with other operations than
6565 sign-extension is. If we are comparing against a constant, we must
6566 convert it to what it would look like unsigned. */
6567 if ((code == EQ || code == NE) && ! unsignedp
6568 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6569 {
6570 if (GET_CODE (op1) == CONST_INT
6571 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6572 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6573 unsignedp = 1;
6574 }
6575#endif
6576
6577 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6578
6579 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6580}
6581\f
6582/* Generate code to calculate EXP using a store-flag instruction
6583 and return an rtx for the result. EXP is either a comparison
6584 or a TRUTH_NOT_EXPR whose operand is a comparison.
6585
6586 If TARGET is nonzero, store the result there if convenient.
6587
6588 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6589 cheap.
6590
6591 Return zero if there is no suitable set-flag instruction
6592 available on this machine.
6593
6594 Once expand_expr has been called on the arguments of the comparison,
6595 we are committed to doing the store flag, since it is not safe to
6596 re-evaluate the expression. We emit the store-flag insn by calling
6597 emit_store_flag, but only expand the arguments if we have a reason
6598 to believe that emit_store_flag will be successful. If we think that
6599 it will, but it isn't, we have to simulate the store-flag with a
6600 set/jump/set sequence. */
6601
6602static rtx
6603do_store_flag (exp, target, mode, only_cheap)
6604 tree exp;
6605 rtx target;
6606 enum machine_mode mode;
6607 int only_cheap;
6608{
6609 enum rtx_code code;
6610 tree arg0, arg1, type;
6611 tree tem;
6612 enum machine_mode operand_mode;
6613 int invert = 0;
6614 int unsignedp;
6615 rtx op0, op1;
6616 enum insn_code icode;
6617 rtx subtarget = target;
6618 rtx result, label, pattern, jump_pat;
6619
6620 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6621 result at the end. We can't simply invert the test since it would
6622 have already been inverted if it were valid. This case occurs for
6623 some floating-point comparisons. */
6624
6625 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6626 invert = 1, exp = TREE_OPERAND (exp, 0);
6627
6628 arg0 = TREE_OPERAND (exp, 0);
6629 arg1 = TREE_OPERAND (exp, 1);
6630 type = TREE_TYPE (arg0);
6631 operand_mode = TYPE_MODE (type);
6632 unsignedp = TREE_UNSIGNED (type);
6633
6634 /* We won't bother with BLKmode store-flag operations because it would mean
6635 passing a lot of information to emit_store_flag. */
6636 if (operand_mode == BLKmode)
6637 return 0;
6638
6639 STRIP_NOPS (arg0);
6640 STRIP_NOPS (arg1);
6641
6642 /* Get the rtx comparison code to use. We know that EXP is a comparison
6643 operation of some type. Some comparisons against 1 and -1 can be
6644 converted to comparisons with zero. Do so here so that the tests
6645 below will be aware that we have a comparison with zero. These
6646 tests will not catch constants in the first operand, but constants
6647 are rarely passed as the first operand. */
6648
6649 switch (TREE_CODE (exp))
6650 {
6651 case EQ_EXPR:
6652 code = EQ;
6653 break;
6654 case NE_EXPR:
6655 code = NE;
6656 break;
6657 case LT_EXPR:
6658 if (integer_onep (arg1))
6659 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6660 else
6661 code = unsignedp ? LTU : LT;
6662 break;
6663 case LE_EXPR:
6664 if (integer_all_onesp (arg1))
6665 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6666 else
6667 code = unsignedp ? LEU : LE;
6668 break;
6669 case GT_EXPR:
6670 if (integer_all_onesp (arg1))
6671 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6672 else
6673 code = unsignedp ? GTU : GT;
6674 break;
6675 case GE_EXPR:
6676 if (integer_onep (arg1))
6677 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6678 else
6679 code = unsignedp ? GEU : GE;
6680 break;
6681 default:
6682 abort ();
6683 }
6684
6685 /* Put a constant second. */
6686 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6687 {
6688 tem = arg0; arg0 = arg1; arg1 = tem;
6689 code = swap_condition (code);
6690 }
6691
6692 /* If this is an equality or inequality test of a single bit, we can
6693 do this by shifting the bit being tested to the low-order bit and
6694 masking the result with the constant 1. If the condition was EQ,
6695 we xor it with 1. This does not require an scc insn and is faster
6696 than an scc insn even if we have it. */
6697
6698 if ((code == NE || code == EQ)
6699 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6700 && integer_pow2p (TREE_OPERAND (arg0, 1))
6701 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
6702 {
6703 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6704 NULL_RTX, VOIDmode, 0)));
6705
6706 if (subtarget == 0 || GET_CODE (subtarget) != REG
6707 || GET_MODE (subtarget) != operand_mode
6708 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6709 subtarget = 0;
6710
6711 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6712
6713 if (bitnum != 0)
6714 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6715 size_int (bitnum), target, 1);
6716
6717 if (GET_MODE (op0) != mode)
6718 op0 = convert_to_mode (mode, op0, 1);
6719
6720 if (bitnum != TYPE_PRECISION (type) - 1)
6721 op0 = expand_and (op0, const1_rtx, target);
6722
e7c33f54 6723 if ((code == EQ && ! invert) || (code == NE && invert))
bbf6f052
RK
6724 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6725 OPTAB_LIB_WIDEN);
6726
6727 return op0;
6728 }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
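
  /* The special cases above: "x < 0" reduces to a sign-bit test, and
     with ! only_cheap "x >= 0" is its inverse; "x == 0" and "x != 0"
     can be synthesized by emit_store_flag from an abs or ffs sequence,
     which is why those optabs are checked here.  */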

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1, operand_mode,
			    unsignedp, 1);

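  /* emit_store_flag was asked for a normalized 0/1 result (last
     argument 1), so inverting it is just "result ^ 1".  */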
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

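  /* The fallback sequence is, schematically:

	 target = 1;
	 if (op0 <cond> op1) goto label;
	 target = 0;
       label:

     with the two constants swapped when INVERT is set.  */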
  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
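  /* Worked example: for "switch (x)" with case labels 5 through 9,
     INDEX holds x - 5 and RANGE is 4.  As an unsigned value, x - 5
     exceeds 4 exactly when x < 5 (the subtraction wrapped around) or
     x > 9, so the single "RANGE < INDEX (unsigned)" branch below
     replaces two signed comparisons.  */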

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
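  /* Each table entry is GET_MODE_SIZE (CASE_VECTOR_MODE) bytes, so the
     entry for INDEX lives at table_label + index * entry_size; the
     address below is built directly in that form.  */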
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */