/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
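
/* Illustrative note (not part of the original source): CEIL rounds an
   integer division up rather than down, so CEIL (9, 4) is 3 while 9 / 4
   is 2.  It is used below, for example, to count how many whole words
   are needed to hold an object of a given size in bytes.  */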

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED /* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory copy would take fewer than MOVE_RATIO move
   instructions, it is done with individual move instructions rather
   than with a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
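
/* Illustrative sketch (not part of the original source): MOVE_RATIO is
   the cutoff used when choosing between inline scalar moves and a block
   move.  emit_block_move below applies it roughly like this:

       if (GET_CODE (size) == CONST_INT
           && move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO)
         move_by_pieces (x, y, INTVAL (size), align);  // a few scalar moves
       else
         ...           // movstr insn, or memcpy/bcopy library call
*/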

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
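
/* Usage sketch (illustrative, not part of the original source): any rtx
   that might be a QUEUED must be filtered through protect_from_queue
   immediately before being placed in an insn, e.g.:

       from = protect_from_queue (from, 0);   // operand only read
       to = protect_from_queue (to, 1);       // operand being modified
       emit_move_insn (to, from);

   The result must not be held across a call to emit_queue, since
   flushing the queue can invalidate it.  */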

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */  /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && direct_load[(int) to_mode]
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* For extension to a wider mode, try a direct or two-step conversion.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
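
/* Usage sketch (illustrative, not part of the original source): widen an
   SImode value into a DImode pseudo with zero-extension:

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);     // src is SImode; unsignedp => ZERO_EXTEND

   Passing 0 for UNSIGNEDP would request sign-extension instead.  */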

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
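
/* Usage sketch (illustrative, not part of the original source):

       rtx narrow = convert_to_mode (QImode, x, 1);

   returns an rtx for X converted to QImode, either by referring to a
   lowpart of X in place or by emitting a conversion into a fresh
   pseudo; unlike convert_move, no destination operand is supplied.  */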
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
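
/* Usage sketch (illustrative, not part of the original source): copy a
   16-byte, word-aligned BLKmode object, where DEST and SRC are BLKmode
   MEM rtx's:

       emit_block_move (dest, src, GEN_INT (16), UNITS_PER_WORD);

   Since the size here is a small CONST_INT, this would normally be
   expanded by move_by_pieces rather than a movstr insn or library call.  */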
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be copied is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block:
   add a REG_LIBCALL note to the first insn after PREV
   and a REG_RETVAL note to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
                                   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
                                    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
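
/* Usage sketch (illustrative, not part of the original source): zero a
   32-byte BLKmode object:

       clear_storage (object, 32);

   which becomes a memset (or bzero) library call; for a non-BLKmode
   OBJECT the SIZE argument is ignored and a single move of const0_rtx
   is emitted instead.  */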

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
                             (class == MODE_COMPLEX_INT
                              ? MODE_INT : MODE_FLOAT),
                             0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && submode != BLKmode
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
        emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_highpart (submode, x)),
                  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_lowpart (submode, x)),
                  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
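
/* Usage sketch (illustrative, not part of the original source):

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));   // constant Y with VOIDmode

   Modes other than BLKmode are handled either by the mov optab for the
   mode, by splitting complex values into real and imaginary parts, or
   word by word for multi-word modes.  */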
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
1592
1593/* Generate code to push X onto the stack, assuming it has mode MODE and
1594 type TYPE.
1595 MODE is redundant except when X is a CONST_INT (since they don't
1596 carry mode info).
1597 SIZE is an rtx for the size of data to be copied (in bytes),
1598 needed only if X is BLKmode.
1599
1600 ALIGN (in bytes) is maximum alignment we can assume.
1601
1602 If PARTIAL is nonzero, then copy that many of the first words
1603 of X into registers starting with REG, and push the rest of X.
1604 The amount of space pushed is decreased by PARTIAL words,
1605 rounded *down* to a multiple of PARM_BOUNDARY.
1606 REG must be a hard register in this case.
1607
1608 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1609 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1610
1611 On a machine that lacks real push insns, ARGS_ADDR is the address of
1612 the bottom of the argument block for this call. We use indexing off there
1613 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1614 argument block has not been preallocated.
1615
1616 ARGS_SO_FAR is the size of args previously pushed for this call. */
1617
1618void
1619emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1620 args_addr, args_so_far)
1621 register rtx x;
1622 enum machine_mode mode;
1623 tree type;
1624 rtx size;
1625 int align;
1626 int partial;
1627 rtx reg;
1628 int extra;
1629 rtx args_addr;
1630 rtx args_so_far;
1631{
1632 rtx xinner;
1633 enum direction stack_direction
1634#ifdef STACK_GROWS_DOWNWARD
1635 = downward;
1636#else
1637 = upward;
1638#endif
1639
1640 /* Decide where to pad the argument: `downward' for below,
1641 `upward' for above, or `none' for don't pad it.
1642 Default is below for small data on big-endian machines; else above. */
1643 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1644
1645 /* Invert direction if stack is post-update. */
1646 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1647 if (where_pad != none)
1648 where_pad = (where_pad == downward ? upward : downward);
1649
1650 xinner = x = protect_from_queue (x, 0);
1651
1652 if (mode == BLKmode)
1653 {
1654 /* Copy a block into the stack, entirely or partially. */
1655
1656 register rtx temp;
1657 int used = partial * UNITS_PER_WORD;
1658 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1659 int skip;
1660
1661 if (size == 0)
1662 abort ();
1663
1664 used -= offset;
1665
1666 /* USED is now the # of bytes we need not copy to the stack
1667 because registers will take care of them. */
1668
1669 if (partial != 0)
1670 xinner = change_address (xinner, BLKmode,
1671 plus_constant (XEXP (xinner, 0), used));
1672
1673 /* If the partial register-part of the arg counts in its stack size,
1674 skip the part of stack space corresponding to the registers.
1675 Otherwise, start copying to the beginning of the stack space,
1676 by setting SKIP to 0. */
1677#ifndef REG_PARM_STACK_SPACE
1678 skip = 0;
1679#else
1680 skip = used;
1681#endif
1682
1683#ifdef PUSH_ROUNDING
1684 /* Do it with several push insns if that doesn't take lots of insns
1685 and if there is no difficulty with push insns that skip bytes
1686 on the stack for alignment purposes. */
1687 if (args_addr == 0
1688 && GET_CODE (size) == CONST_INT
1689 && skip == 0
1690 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1691 < MOVE_RATIO)
bbf6f052
RK
1692 /* Here we avoid the case of a structure whose weak alignment
1693 forces many pushes of a small amount of data,
1694 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
1695 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1696 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1697 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
1698 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1699 {
1700 /* Push padding now if padding above and stack grows down,
1701 or if padding below and stack grows up.
1702 But if space already allocated, this has already been done. */
1703 if (extra && args_addr == 0
1704 && where_pad != none && where_pad != stack_direction)
906c4e36 1705 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
1706
1707 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1708 INTVAL (size) - used, align);
1709 }
1710 else
1711#endif /* PUSH_ROUNDING */
1712 {
1713 /* Otherwise make space on the stack and copy the data
1714 to the address of that space. */
1715
1716 /* Deduct words put into registers from the size we must copy. */
1717 if (partial != 0)
1718 {
1719 if (GET_CODE (size) == CONST_INT)
906c4e36 1720 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
1721 else
1722 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
1723 GEN_INT (used), NULL_RTX, 0,
1724 OPTAB_LIB_WIDEN);
bbf6f052
RK
1725 }
1726
1727 /* Get the address of the stack space.
1728 In this case, we do not deal with EXTRA separately.
1729 A single stack adjust will do. */
1730 if (! args_addr)
1731 {
1732 temp = push_block (size, extra, where_pad == downward);
1733 extra = 0;
1734 }
1735 else if (GET_CODE (args_so_far) == CONST_INT)
1736 temp = memory_address (BLKmode,
1737 plus_constant (args_addr,
1738 skip + INTVAL (args_so_far)));
1739 else
1740 temp = memory_address (BLKmode,
1741 plus_constant (gen_rtx (PLUS, Pmode,
1742 args_addr, args_so_far),
1743 skip));
1744
1745 /* TEMP is the address of the block. Copy the data there. */
1746 if (GET_CODE (size) == CONST_INT
1747 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1748 < MOVE_RATIO))
1749 {
1750 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1751 INTVAL (size), align);
1752 goto ret;
1753 }
1754 /* Try the most limited insn first, because there's no point
1755 including more than one in the machine description unless
1756 the more limited one has some advantage. */
1757#ifdef HAVE_movstrqi
1758 if (HAVE_movstrqi
1759 && GET_CODE (size) == CONST_INT
1760 && ((unsigned) INTVAL (size)
1761 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1762 {
1763 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1764 xinner, size, GEN_INT (align)));
bbf6f052
RK
1765 goto ret;
1766 }
1767#endif
1768#ifdef HAVE_movstrhi
1769 if (HAVE_movstrhi
1770 && GET_CODE (size) == CONST_INT
1771 && ((unsigned) INTVAL (size)
1772 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1773 {
1774 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1775 xinner, size, GEN_INT (align)));
bbf6f052
RK
1776 goto ret;
1777 }
1778#endif
1779#ifdef HAVE_movstrsi
1780 if (HAVE_movstrsi)
1781 {
1782 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1783 xinner, size, GEN_INT (align)));
bbf6f052
RK
1784 goto ret;
1785 }
1786#endif
1787#ifdef HAVE_movstrdi
1788 if (HAVE_movstrdi)
1789 {
1790 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
906c4e36 1791 xinner, size, GEN_INT (align)));
bbf6f052
RK
1792 goto ret;
1793 }
1794#endif
1795
1796#ifndef ACCUMULATE_OUTGOING_ARGS
1797 /* If the source is referenced relative to the stack pointer,
1798 copy it to another register to stabilize it. We do not need
1799 to do this if we know that we won't be changing sp. */
1800
1801 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1802 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1803 temp = copy_to_reg (temp);
1804#endif
1805
1806 /* Make inhibit_defer_pop nonzero around the library call
1807 to force it to pop the bcopy-arguments right away. */
1808 NO_DEFER_POP;
1809#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1810 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1811 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1812 size, Pmode);
1813#else
d562e42e 1814 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1815 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1816 size, Pmode);
1817#endif
1818 OK_DEFER_POP;
1819 }
1820 }
1821 else if (partial > 0)
1822 {
1823 /* Scalar partly in registers. */
1824
1825 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1826 int i;
1827 int not_stack;
1828 /* # words of start of argument
1829 that we must make space for but need not store. */
1830 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1831 int args_offset = INTVAL (args_so_far);
1832 int skip;
1833
1834 /* Push padding now if padding above and stack grows down,
1835 or if padding below and stack grows up.
1836 But if space already allocated, this has already been done. */
1837 if (extra && args_addr == 0
1838 && where_pad != none && where_pad != stack_direction)
906c4e36 1839 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
1840
1841 /* If we make space by pushing it, we might as well push
1842 the real data. Otherwise, we can leave OFFSET nonzero
1843 and leave the space uninitialized. */
1844 if (args_addr == 0)
1845 offset = 0;
1846
1847 /* Now NOT_STACK gets the number of words that we don't need to
1848 allocate on the stack. */
1849 not_stack = partial - offset;
1850
1851 /* If the partial register-part of the arg counts in its stack size,
1852 skip the part of stack space corresponding to the registers.
1853 Otherwise, start copying to the beginning of the stack space,
1854 by setting SKIP to 0. */
1855#ifndef REG_PARM_STACK_SPACE
1856 skip = 0;
1857#else
1858 skip = not_stack;
1859#endif
1860
1861 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1862 x = validize_mem (force_const_mem (mode, x));
1863
1864 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1865 SUBREGs of such registers are not allowed. */
1866 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1867 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1868 x = copy_to_reg (x);
1869
1870 /* Loop over all the words allocated on the stack for this arg. */
1871 /* We can do it by words, because any scalar bigger than a word
1872	 has a size that is a multiple of a word.  */
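	  /* A hypothetical example: with UNITS_PER_WORD == 4, an 8-byte
	     scalar occupies words 0 and 1.  If word 0 was passed in a
	     register, NOT_STACK is 1 and only word 1 is pushed here.  */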
1873#ifndef PUSH_ARGS_REVERSED
1874 for (i = not_stack; i < size; i++)
1875#else
1876 for (i = size - 1; i >= not_stack; i--)
1877#endif
1878 if (i >= not_stack + offset)
1879 emit_push_insn (operand_subword_force (x, i, mode),
1880 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
1881 0, args_addr,
1882 GEN_INT (args_offset + ((i - not_stack + skip)
1883 * UNITS_PER_WORD)));
1884 }
1885 else
1886 {
1887 rtx addr;
1888
1889 /* Push padding now if padding above and stack grows down,
1890 or if padding below and stack grows up.
1891 But if space already allocated, this has already been done. */
1892 if (extra && args_addr == 0
1893 && where_pad != none && where_pad != stack_direction)
1894	anti_adjust_stack (GEN_INT (extra));
1895
1896#ifdef PUSH_ROUNDING
1897 if (args_addr == 0)
1898 addr = gen_push_operand ();
1899 else
1900#endif
1901 if (GET_CODE (args_so_far) == CONST_INT)
1902 addr
1903 = memory_address (mode,
1904 plus_constant (args_addr, INTVAL (args_so_far)));
1905 else
1906 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
1907 args_so_far));
1908
1909 emit_move_insn (gen_rtx (MEM, mode, addr), x);
1910 }
1911
1912 ret:
1913 /* If part should go in registers, copy that part
1914 into the appropriate registers. Do this now, at the end,
1915 since mem-to-mem copies above may do function calls. */
1916 if (partial > 0)
1917 move_block_to_reg (REGNO (reg), x, partial, mode);
1918
1919 if (extra && args_addr == 0 && where_pad == stack_direction)
1920    anti_adjust_stack (GEN_INT (extra));
1921}
1922\f
1923/* Output a library call to function FUN (a SYMBOL_REF rtx)
1924 (emitting the queue unless NO_QUEUE is nonzero),
1925 for a value of mode OUTMODE,
1926 with NARGS different arguments, passed as alternating rtx values
1927 and machine_modes to convert them to.
1928 The rtx values should have been passed through protect_from_queue already.
1929
1930 NO_QUEUE will be true if and only if the library call is a `const' call
1931 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
1932 to the variable is_const in expand_call.
1933
1934 NO_QUEUE must be true for const calls, because if it isn't, then
1935 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
1936 and will be lost if the libcall sequence is optimized away.
1937
1938 NO_QUEUE must be false for non-const calls, because if it isn't, the
1939 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
1940 optimized. For instance, the instruction scheduler may incorrectly
1941 move memory references across the non-const call. */
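/* For example (a sketch only; DEST, SRC and SIZE are assumed to be
   suitable rtx values), the memcpy calls elsewhere in this file look like

	emit_library_call (memcpy_libfunc, 0,
			   VOIDmode, 3, dest, Pmode, src, Pmode,
			   size, Pmode);

   that is: FUN, NO_QUEUE, OUTMODE, NARGS, then NARGS pairs of
   (rtx value, machine mode).  */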
1942
1943void
1944emit_library_call (va_alist)
1945 va_dcl
1946{
1947 va_list p;
1948 struct args_size args_size;
1949 register int argnum;
1950 enum machine_mode outmode;
1951 int nargs;
1952 rtx fun;
1953 rtx orgfun;
1954 int inc;
1955 int count;
1956 rtx argblock = 0;
1957 CUMULATIVE_ARGS args_so_far;
1958 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1959 struct args_size offset; struct args_size size; };
1960 struct arg *argvec;
1961 int old_inhibit_defer_pop = inhibit_defer_pop;
1962 int no_queue = 0;
1963 rtx use_insns;
1964
1965 va_start (p);
1966 orgfun = fun = va_arg (p, rtx);
1967 no_queue = va_arg (p, int);
1968 outmode = va_arg (p, enum machine_mode);
1969 nargs = va_arg (p, int);
1970
1971 /* Copy all the libcall-arguments out of the varargs data
1972 and into a vector ARGVEC.
1973
1974 Compute how to pass each argument. We only support a very small subset
1975 of the full argument passing conventions to limit complexity here since
1976 library functions shouldn't have many args. */
1977
1978 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1979
1980 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1981
1982 args_size.constant = 0;
1983 args_size.var = 0;
1984
1985 for (count = 0; count < nargs; count++)
1986 {
1987 rtx val = va_arg (p, rtx);
1988 enum machine_mode mode = va_arg (p, enum machine_mode);
1989
1990 /* We cannot convert the arg value to the mode the library wants here;
1991 must do it earlier where we know the signedness of the arg. */
1992 if (mode == BLKmode
1993 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1994 abort ();
1995
1996 /* On some machines, there's no way to pass a float to a library fcn.
1997 Pass it as a double instead. */
1998#ifdef LIBGCC_NEEDS_DOUBLE
1999 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2000	val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2001#endif
2002
2003 /* There's no need to call protect_from_queue, because
2004 either emit_move_insn or emit_push_insn will do that. */
2005
2006 /* Make sure it is a reasonable operand for a move or push insn. */
2007 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2008 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2009	val = force_operand (val, NULL_RTX);
2010
2011 argvec[count].value = val;
2012 argvec[count].mode = mode;
2013
2014#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2015      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2016 abort ();
2017#endif
2018
2019      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2020 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2021 abort ();
2022#ifdef FUNCTION_ARG_PARTIAL_NREGS
2023 argvec[count].partial
2024	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2025#else
2026 argvec[count].partial = 0;
2027#endif
2028
2029      locate_and_pad_parm (mode, NULL_TREE,
2030			   argvec[count].reg && argvec[count].partial == 0,
2031			   NULL_TREE, &args_size, &argvec[count].offset,
2032 &argvec[count].size);
2033
2034 if (argvec[count].size.var)
2035 abort ();
2036
2037#ifndef REG_PARM_STACK_SPACE
2038 if (argvec[count].partial)
2039 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2040#endif
2041
2042 if (argvec[count].reg == 0 || argvec[count].partial != 0
2043#ifdef REG_PARM_STACK_SPACE
2044 || 1
2045#endif
2046 )
2047 args_size.constant += argvec[count].size.constant;
2048
2049#ifdef ACCUMULATE_OUTGOING_ARGS
2050 /* If this arg is actually passed on the stack, it might be
2051 clobbering something we already put there (this library call might
2052 be inside the evaluation of an argument to a function whose call
2053 requires the stack). This will only occur when the library call
2054 has sufficient args to run out of argument registers. Abort in
2055 this case; if this ever occurs, code must be added to save and
2056 restore the arg slot. */
2057
2058 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2059 abort ();
2060#endif
2061
2062 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2063 }
2064 va_end (p);
2065
2066 /* If this machine requires an external definition for library
2067 functions, write one out. */
2068 assemble_external_libcall (fun);
2069
2070#ifdef STACK_BOUNDARY
2071 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2072 / STACK_BYTES) * STACK_BYTES);
2073#endif
2074
2075#ifdef REG_PARM_STACK_SPACE
2076 args_size.constant = MAX (args_size.constant,
2077 REG_PARM_STACK_SPACE ((tree) 0));
2078#endif
2079
2080#ifdef ACCUMULATE_OUTGOING_ARGS
2081 if (args_size.constant > current_function_outgoing_args_size)
2082 current_function_outgoing_args_size = args_size.constant;
2083 args_size.constant = 0;
2084#endif
2085
2086#ifndef PUSH_ROUNDING
2087  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2088#endif
2089
2090#ifdef PUSH_ARGS_REVERSED
2091 inc = -1;
2092 argnum = nargs - 1;
2093#else
2094 inc = 1;
2095 argnum = 0;
2096#endif
2097
2098 /* Push the args that need to be pushed. */
2099
2100 for (count = 0; count < nargs; count++, argnum += inc)
2101 {
2102 register enum machine_mode mode = argvec[argnum].mode;
2103 register rtx val = argvec[argnum].value;
2104 rtx reg = argvec[argnum].reg;
2105 int partial = argvec[argnum].partial;
2106
2107 if (! (reg != 0 && partial == 0))
2108 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2109			argblock, GEN_INT (argvec[argnum].offset.constant));
2110 NO_DEFER_POP;
2111 }
2112
2113#ifdef PUSH_ARGS_REVERSED
2114 argnum = nargs - 1;
2115#else
2116 argnum = 0;
2117#endif
2118
2119 /* Now load any reg parms into their regs. */
2120
2121 for (count = 0; count < nargs; count++, argnum += inc)
2122 {
2123 register enum machine_mode mode = argvec[argnum].mode;
2124 register rtx val = argvec[argnum].value;
2125 rtx reg = argvec[argnum].reg;
2126 int partial = argvec[argnum].partial;
2127
2128 if (reg != 0 && partial == 0)
2129 emit_move_insn (reg, val);
2130 NO_DEFER_POP;
2131 }
2132
2133 /* For version 1.37, try deleting this entirely. */
2134 if (! no_queue)
2135 emit_queue ();
2136
2137 /* Any regs containing parms remain in use through the call. */
2138 start_sequence ();
2139 for (count = 0; count < nargs; count++)
2140 if (argvec[count].reg != 0)
2141 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2142
2143 use_insns = get_insns ();
2144 end_sequence ();
2145
2146  fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2147
2148 /* Don't allow popping to be deferred, since then
2149 cse'ing of library calls could delete a call and leave the pop. */
2150 NO_DEFER_POP;
2151
2152 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2153 will set inhibit_defer_pop to that value. */
2154
2155 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2156 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2157	       outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2158 old_inhibit_defer_pop + 1, use_insns, no_queue);
2159
2160 /* Now restore inhibit_defer_pop to its actual original value. */
2161 OK_DEFER_POP;
2162}
2163\f
2164/* Expand an assignment that stores the value of FROM into TO.
2165 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2166 (This may contain a QUEUED rtx.)
2167 Otherwise, the returned value is not meaningful.
2168
2169 SUGGEST_REG is no longer actually used.
2170 It used to mean, copy the value through a register
2171 and return that register, if that is possible.
2172 But now we do this if WANT_VALUE.
2173
2174 If the value stored is a constant, we return the constant. */
2175
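/* A minimal sketch of a typical call, assuming LHS and RHS are the trees
   for the two sides of an assignment:

	rtx val = expand_assignment (lhs, rhs, want_value, 0);

   When WANT_VALUE is zero, the caller must not use the returned rtx.  */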
2176rtx
2177expand_assignment (to, from, want_value, suggest_reg)
2178 tree to, from;
2179 int want_value;
2180 int suggest_reg;
2181{
2182 register rtx to_rtx = 0;
2183 rtx result;
2184
2185 /* Don't crash if the lhs of the assignment was erroneous. */
2186
2187 if (TREE_CODE (to) == ERROR_MARK)
2188    return expand_expr (from, NULL_RTX, VOIDmode, 0);
2189
2190 /* Assignment of a structure component needs special treatment
2191 if the structure component's rtx is not simply a MEM.
2192 Assignment of an array element at a constant index
2193 has the same problem. */
2194
2195 if (TREE_CODE (to) == COMPONENT_REF
2196 || TREE_CODE (to) == BIT_FIELD_REF
2197 || (TREE_CODE (to) == ARRAY_REF
2198 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2200 {
2201 enum machine_mode mode1;
2202 int bitsize;
2203 int bitpos;
2204      tree offset;
2205 int unsignedp;
2206 int volatilep = 0;
2207      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2208 &mode1, &unsignedp, &volatilep);
2209
2210 /* If we are going to use store_bit_field and extract_bit_field,
2211 make sure to_rtx will be safe for multiple use. */
2212
2213 if (mode1 == VOIDmode && want_value)
2214 tem = stabilize_reference (tem);
2215
2216      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2217 if (offset != 0)
2218 {
2219	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2220
2221 if (GET_CODE (to_rtx) != MEM)
2222 abort ();
2223 to_rtx = change_address (to_rtx, VOIDmode,
2224 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2225 force_reg (Pmode, offset_rtx)));
2226 }
2227 if (volatilep)
2228 {
2229 if (GET_CODE (to_rtx) == MEM)
2230 MEM_VOLATILE_P (to_rtx) = 1;
2231#if 0 /* This was turned off because, when a field is volatile
2232 in an object which is not volatile, the object may be in a register,
2233 and then we would abort over here. */
2234 else
2235 abort ();
2236#endif
2237 }
2238
2239 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2240 (want_value
2241 /* Spurious cast makes HPUX compiler happy. */
2242 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2243 : VOIDmode),
2244 unsignedp,
2245 /* Required alignment of containing datum. */
2246 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2247 int_size_in_bytes (TREE_TYPE (tem)));
2248 preserve_temp_slots (result);
2249 free_temp_slots ();
2250
2251 return result;
2252 }
2253
2254 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2255 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2256
2257 if (to_rtx == 0)
2258    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2259
2260 /* In case we are returning the contents of an object which overlaps
2261 the place the value is being stored, use a safe function when copying
2262 a value through a pointer into a structure value return block. */
2263 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2264 && current_function_returns_struct
2265 && !current_function_returns_pcc_struct)
2266 {
2267      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2268 rtx size = expr_size (from);
2269
2270#ifdef TARGET_MEM_FUNCTIONS
2271      emit_library_call (memcpy_libfunc, 0,
2272 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2273 XEXP (from_rtx, 0), Pmode,
2274 size, Pmode);
2275#else
2276      emit_library_call (bcopy_libfunc, 0,
2277 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2278 XEXP (to_rtx, 0), Pmode,
2279 size, Pmode);
2280#endif
2281
2282 preserve_temp_slots (to_rtx);
2283 free_temp_slots ();
2284 return to_rtx;
2285 }
2286
2287 /* Compute FROM and store the value in the rtx we got. */
2288
2289 result = store_expr (from, to_rtx, want_value);
2290 preserve_temp_slots (result);
2291 free_temp_slots ();
2292 return result;
2293}
2294
2295/* Generate code for computing expression EXP,
2296 and storing the value into TARGET.
2297 Returns TARGET or an equivalent value.
2298 TARGET may contain a QUEUED rtx.
2299
2300 If SUGGEST_REG is nonzero, copy the value through a register
2301 and return that register, if that is possible.
2302
2303 If the value stored is a constant, we return the constant. */
2304
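/* A sketch of how expand_assignment above drives this function:

	result = store_expr (from, to_rtx, want_value);

   EXP is the right-hand side tree and TARGET is the rtx already
   computed for the left-hand side.  */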
2305rtx
2306store_expr (exp, target, suggest_reg)
2307 register tree exp;
2308 register rtx target;
2309 int suggest_reg;
2310{
2311 register rtx temp;
2312 int dont_return_target = 0;
2313
2314 if (TREE_CODE (exp) == COMPOUND_EXPR)
2315 {
2316 /* Perform first part of compound expression, then assign from second
2317 part. */
2318 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2319 emit_queue ();
2320 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2321 }
2322 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2323 {
2324 /* For conditional expression, get safe form of the target. Then
2325 test the condition, doing the appropriate assignment on either
2326 side. This avoids the creation of unnecessary temporaries.
2327 For non-BLKmode, it is more efficient not to do this. */
2328
2329 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2330
2331 emit_queue ();
2332 target = protect_from_queue (target, 1);
2333
2334 NO_DEFER_POP;
2335 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2336 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2337 emit_queue ();
2338 emit_jump_insn (gen_jump (lab2));
2339 emit_barrier ();
2340 emit_label (lab1);
2341 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2342 emit_queue ();
2343 emit_label (lab2);
2344 OK_DEFER_POP;
2345 return target;
2346 }
2347 else if (suggest_reg && GET_CODE (target) == MEM
2348 && GET_MODE (target) != BLKmode)
2349 /* If target is in memory and caller wants value in a register instead,
2350 arrange that. Pass TARGET as target for expand_expr so that,
2351 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2352 We know expand_expr will not use the target in that case. */
2353 {
2354      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2355 GET_MODE (target), 0);
2356 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2357 temp = copy_to_reg (temp);
2358 dont_return_target = 1;
2359 }
2360 else if (queued_subexp_p (target))
2361 /* If target contains a postincrement, it is not safe
2362 to use as the returned value. It would access the wrong
2363 place by the time the queued increment gets output.
2364 So copy the value through a temporary and use that temp
2365 as the result. */
2366 {
2367 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2368 {
2369 /* Expand EXP into a new pseudo. */
2370 temp = gen_reg_rtx (GET_MODE (target));
2371 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2372 }
2373 else
2374	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2375 dont_return_target = 1;
2376 }
2377 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2378    /* If this is a scalar in a register that is stored in a wider mode
2379 than the declared mode, compute the result into its declared mode
2380 and then convert to the wider mode. Our value is the computed
2381 expression. */
2382 {
2383 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2384 convert_move (SUBREG_REG (target), temp,
2385 SUBREG_PROMOTED_UNSIGNED_P (target));
2386 return temp;
2387 }
2388 else
2389 {
2390 temp = expand_expr (exp, target, GET_MODE (target), 0);
2391 /* DO return TARGET if it's a specified hardware register.
2392 expand_return relies on this. */
2393 if (!(target && GET_CODE (target) == REG
2394 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2395 && CONSTANT_P (temp))
2396 dont_return_target = 1;
2397 }
2398
2399 /* If value was not generated in the target, store it there.
2400     Convert the value to TARGET's type first if necessary.  */
2401
2402 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2403 {
2404 target = protect_from_queue (target, 1);
2405 if (GET_MODE (temp) != GET_MODE (target)
2406 && GET_MODE (temp) != VOIDmode)
2407 {
2408 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2409 if (dont_return_target)
2410 {
2411 /* In this case, we will return TEMP,
2412 so make sure it has the proper mode.
2413 But don't forget to store the value into TARGET. */
2414 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2415 emit_move_insn (target, temp);
2416 }
2417 else
2418 convert_move (target, temp, unsignedp);
2419 }
2420
2421 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2422 {
2423 /* Handle copying a string constant into an array.
2424 The string constant may be shorter than the array.
2425 So copy just the string's actual length, and clear the rest. */
2426 rtx size;
2427
2428 /* Get the size of the data type of the string,
2429 which is actually the size of the target. */
2430 size = expr_size (exp);
2431 if (GET_CODE (size) == CONST_INT
2432 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2433 emit_block_move (target, temp, size,
2434 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2435 else
2436	{
2437 /* Compute the size of the data to copy from the string. */
2438 tree copy_size
2439 = fold (build (MIN_EXPR, sizetype,
2440 size_binop (CEIL_DIV_EXPR,
2441 TYPE_SIZE (TREE_TYPE (exp)),
2442 size_int (BITS_PER_UNIT)),
2443 convert (sizetype,
2444 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2445 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2446 VOIDmode, 0);
2447 rtx label = 0;
2448
2449 /* Copy that much. */
2450 emit_block_move (target, temp, copy_size_rtx,
2451 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2452
2453 /* Figure out how much is left in TARGET
2454 that we have to clear. */
2455 if (GET_CODE (copy_size_rtx) == CONST_INT)
2456 {
2457 temp = plus_constant (XEXP (target, 0),
2458 TREE_STRING_LENGTH (exp));
2459 size = plus_constant (size,
2460 - TREE_STRING_LENGTH (exp));
2461 }
2462 else
2463 {
2464 enum machine_mode size_mode = Pmode;
2465
2466 temp = force_reg (Pmode, XEXP (target, 0));
2467 temp = expand_binop (size_mode, add_optab, temp,
2468 copy_size_rtx, NULL_RTX, 0,
2469 OPTAB_LIB_WIDEN);
2470
2471 size = expand_binop (size_mode, sub_optab, size,
2472 copy_size_rtx, NULL_RTX, 0,
2473 OPTAB_LIB_WIDEN);
2474
2475	      emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2476 GET_MODE (size), 0, 0);
2477 label = gen_label_rtx ();
2478 emit_jump_insn (gen_blt (label));
2479 }
2480
2481 if (size != const0_rtx)
2482 {
2483#ifdef TARGET_MEM_FUNCTIONS
2484		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2485				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
2486#else
2487		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2488				     temp, Pmode, size, Pmode);
2489#endif
2490 }
2491 if (label)
2492 emit_label (label);
2493 }
2494 }
2495 else if (GET_MODE (temp) == BLKmode)
2496 emit_block_move (target, temp, expr_size (exp),
2497 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2498 else
2499 emit_move_insn (target, temp);
2500 }
2501 if (dont_return_target)
2502 return temp;
2503 return target;
2504}
2505\f
2506/* Store the value of constructor EXP into the rtx TARGET.
2507 TARGET is either a REG or a MEM. */
2508
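/* For example, for a hypothetical C initialization

	struct { int a, b; } x = { 1 };

   the CONSTRUCTOR has fewer elements than the type has fields, so the
   code below first clears all of X's storage and then calls store_field
   once to store 1 into the field `a'.  */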
2509static void
2510store_constructor (exp, target)
2511 tree exp;
2512 rtx target;
2513{
2514 tree type = TREE_TYPE (exp);
2515
2516 /* We know our target cannot conflict, since safe_from_p has been called. */
2517#if 0
2518 /* Don't try copying piece by piece into a hard register
2519 since that is vulnerable to being clobbered by EXP.
2520 Instead, construct in a pseudo register and then copy it all. */
2521 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2522 {
2523 rtx temp = gen_reg_rtx (GET_MODE (target));
2524 store_constructor (exp, temp);
2525 emit_move_insn (target, temp);
2526 return;
2527 }
2528#endif
2529
2530  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2531 {
2532 register tree elt;
2533
2534 /* Inform later passes that the whole union value is dead. */
2535 if (TREE_CODE (type) == UNION_TYPE)
2536	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2537
2538 /* If we are building a static constructor into a register,
2539 set the initial value as zero so we can fold the value into
2540 a constant. */
2541 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2542 emit_move_insn (target, const0_rtx);
2543
2544 /* If the constructor has fewer fields than the structure,
2545 clear the whole structure first. */
2546 else if (list_length (CONSTRUCTOR_ELTS (exp))
2547 != list_length (TYPE_FIELDS (type)))
2548 clear_storage (target, int_size_in_bytes (type));
2549 else
2550 /* Inform later passes that the old value is dead. */
2551 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2552
2553 /* Store each element of the constructor into
2554 the corresponding field of TARGET. */
2555
2556 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2557 {
2558 register tree field = TREE_PURPOSE (elt);
2559 register enum machine_mode mode;
2560 int bitsize;
2561 int bitpos;
2562 int unsignedp;
2563
2564 /* Just ignore missing fields.
2565 We cleared the whole structure, above,
2566 if any fields are missing. */
2567 if (field == 0)
2568 continue;
2569
2570 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2571 unsignedp = TREE_UNSIGNED (field);
2572 mode = DECL_MODE (field);
2573 if (DECL_BIT_FIELD (field))
2574 mode = VOIDmode;
2575
2576 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2577 /* ??? This case remains to be written. */
2578 abort ();
2579
2580 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2581
2582 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2583 /* The alignment of TARGET is
2584 at least what its type requires. */
2585 VOIDmode, 0,
2586 TYPE_ALIGN (type) / BITS_PER_UNIT,
2587 int_size_in_bytes (type));
2588 }
2589 }
2590  else if (TREE_CODE (type) == ARRAY_TYPE)
2591 {
2592 register tree elt;
2593 register int i;
2594      tree domain = TYPE_DOMAIN (type);
2595 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2596 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2597      tree elttype = TREE_TYPE (type);
2598
2599 /* If the constructor has fewer fields than the structure,
2600	 clear the whole structure first.  Similarly if this is a
2601	 static constructor of a non-BLKmode object.  */
2602
2603 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2604 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2605 clear_storage (target, maxelt - minelt + 1);
2606 else
2607 /* Inform later passes that the old value is dead. */
2608 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2609
2610 /* Store each element of the constructor into
2611 the corresponding element of TARGET, determined
2612 by counting the elements. */
2613 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2614 elt;
2615 elt = TREE_CHAIN (elt), i++)
2616 {
2617 register enum machine_mode mode;
2618 int bitsize;
2619 int bitpos;
2620 int unsignedp;
2621
2622 mode = TYPE_MODE (elttype);
2623 bitsize = GET_MODE_BITSIZE (mode);
2624 unsignedp = TREE_UNSIGNED (elttype);
2625
2626 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2627
2628 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2629 /* The alignment of TARGET is
2630 at least what its type requires. */
2631 VOIDmode, 0,
2632 TYPE_ALIGN (type) / BITS_PER_UNIT,
2633 int_size_in_bytes (type));
2634 }
2635 }
2636
2637 else
2638 abort ();
2639}
2640
2641/* Store the value of EXP (an expression tree)
2642 into a subfield of TARGET which has mode MODE and occupies
2643 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2644 If MODE is VOIDmode, it means that we are storing into a bit-field.
2645
2646 If VALUE_MODE is VOIDmode, return nothing in particular.
2647 UNSIGNEDP is not used in this case.
2648
2649 Otherwise, return an rtx for the value stored. This rtx
2650 has mode VALUE_MODE if that is convenient to do.
2651 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2652
2653 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2654 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2655
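/* A hypothetical example: storing EXP into a bit-field declared as
   `int f : 3;' that begins 37 bits into a 12-byte structure would be

	store_field (target, 3, 37, VOIDmode, exp, VOIDmode, 0,
		     align, 12);

   where MODE is VOIDmode because F is a bit-field.  */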
2656static rtx
2657store_field (target, bitsize, bitpos, mode, exp, value_mode,
2658 unsignedp, align, total_size)
2659 rtx target;
2660 int bitsize, bitpos;
2661 enum machine_mode mode;
2662 tree exp;
2663 enum machine_mode value_mode;
2664 int unsignedp;
2665 int align;
2666 int total_size;
2667{
2668  HOST_WIDE_INT width_mask = 0;
2669
2670 if (bitsize < HOST_BITS_PER_WIDE_INT)
2671 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2672
2673 /* If we are storing into an unaligned field of an aligned union that is
2674 in a register, we may have the mode of TARGET being an integer mode but
2675 MODE == BLKmode. In that case, get an aligned object whose size and
2676 alignment are the same as TARGET and store TARGET into it (we can avoid
2677 the store if the field being stored is the entire width of TARGET). Then
2678 call ourselves recursively to store the field into a BLKmode version of
2679 that object. Finally, load from the object into TARGET. This is not
2680 very efficient in general, but should only be slightly more expensive
2681 than the otherwise-required unaligned accesses. Perhaps this can be
2682 cleaned up later. */
2683
2684 if (mode == BLKmode
2685 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2686 {
2687 rtx object = assign_stack_temp (GET_MODE (target),
2688 GET_MODE_SIZE (GET_MODE (target)), 0);
2689 rtx blk_object = copy_rtx (object);
2690
2691 PUT_MODE (blk_object, BLKmode);
2692
2693 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2694 emit_move_insn (object, target);
2695
2696 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2697 align, total_size);
2698
2699 emit_move_insn (target, object);
2700
2701 return target;
2702 }
2703
2704 /* If the structure is in a register or if the component
2705 is a bit field, we cannot use addressing to access it.
2706 Use bit-field techniques or SUBREG to store in it. */
2707
2708 if (mode == VOIDmode
2709 || (mode != BLKmode && ! direct_store[(int) mode])
2710 || GET_CODE (target) == REG
2711 || GET_CODE (target) == SUBREG)
2712 {
2713      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2714 /* Store the value in the bitfield. */
2715 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2716 if (value_mode != VOIDmode)
2717 {
2718 /* The caller wants an rtx for the value. */
2719 /* If possible, avoid refetching from the bitfield itself. */
2720 if (width_mask != 0
2721 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2722	{
2723	  tree count;
2724	  enum machine_mode tmode;
2725
2726 if (unsignedp)
2727 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2728 tmode = GET_MODE (temp);
2729 if (tmode == VOIDmode)
2730 tmode = value_mode;
2731 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2732 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2733 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2734 }
bbf6f052 2735 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
2736 NULL_RTX, value_mode, 0, align,
2737 total_size);
2738 }
2739 return const0_rtx;
2740 }
2741 else
2742 {
2743 rtx addr = XEXP (target, 0);
2744 rtx to_rtx;
2745
2746 /* If a value is wanted, it must be the lhs;
2747 so make the address stable for multiple use. */
2748
2749 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2750 && ! CONSTANT_ADDRESS_P (addr)
2751 /* A frame-pointer reference is already stable. */
2752 && ! (GET_CODE (addr) == PLUS
2753 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2754 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2755 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2756 addr = copy_to_reg (addr);
2757
2758 /* Now build a reference to just the desired component. */
2759
2760 to_rtx = change_address (target, mode,
2761 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2762 MEM_IN_STRUCT_P (to_rtx) = 1;
2763
2764 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2765 }
2766}
2767\f
2768/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2769 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2770 ARRAY_REFs at constant positions and find the ultimate containing object,
2771 which we return.
2772
2773 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2774 bit position, and *PUNSIGNEDP to the signedness of the field.
2775 If the position of the field is variable, we store a tree
2776 giving the variable offset (in units) in *POFFSET.
2777 This offset is in addition to the bit position.
2778 If the position is not variable, we store 0 in *POFFSET.
2779
2780 If any of the extraction expressions is volatile,
2781 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2782
2783 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2784 is a mode that can be used to access the field. In that case, *PBITSIZE
2785 is redundant.
2786
2787 If the field describes a variable-sized object, *PMODE is set to
2788 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2789 this case, but the address of the object can be found. */
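/* For example, for a reference like `x.a.b' in which every position is a
   compile-time constant, the tree for `x' is returned, *PBITPOS is the
   sum of the bit positions of `a' and `b', and *POFFSET is zero.  */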
2790
2791tree
2792get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2793 tree exp;
2794 int *pbitsize;
2795 int *pbitpos;
2796     tree *poffset;
2797 enum machine_mode *pmode;
2798 int *punsignedp;
2799 int *pvolatilep;
2800{
2801 tree size_tree = 0;
2802 enum machine_mode mode = VOIDmode;
2803  tree offset = 0;
2804
2805 if (TREE_CODE (exp) == COMPONENT_REF)
2806 {
2807 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2808 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2809 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2810 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2811 }
2812 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2813 {
2814 size_tree = TREE_OPERAND (exp, 1);
2815 *punsignedp = TREE_UNSIGNED (exp);
2816 }
2817 else
2818 {
2819 mode = TYPE_MODE (TREE_TYPE (exp));
2820 *pbitsize = GET_MODE_BITSIZE (mode);
2821 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2822 }
2823
2824 if (size_tree)
2825 {
2826 if (TREE_CODE (size_tree) != INTEGER_CST)
2827 mode = BLKmode, *pbitsize = -1;
2828 else
2829 *pbitsize = TREE_INT_CST_LOW (size_tree);
2830 }
2831
2832 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2833 and find the ultimate containing object. */
2834
2835 *pbitpos = 0;
2836
2837 while (1)
2838 {
2839      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2840	{
2841 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2842 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2843 : TREE_OPERAND (exp, 2));
2844
2845 if (TREE_CODE (pos) == PLUS_EXPR)
2846 {
2847 tree constant, var;
2848 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2849 {
2850 constant = TREE_OPERAND (pos, 0);
2851 var = TREE_OPERAND (pos, 1);
2852 }
2853 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2854 {
2855 constant = TREE_OPERAND (pos, 1);
2856 var = TREE_OPERAND (pos, 0);
2857 }
2858 else
2859 abort ();
2860 *pbitpos += TREE_INT_CST_LOW (constant);
2861 if (offset)
2862 offset = size_binop (PLUS_EXPR, offset,
2863 size_binop (FLOOR_DIV_EXPR, var,
2864 size_int (BITS_PER_UNIT)));
2865 else
2866 offset = size_binop (FLOOR_DIV_EXPR, var,
2867 size_int (BITS_PER_UNIT));
2868 }
2869 else if (TREE_CODE (pos) == INTEGER_CST)
2870 *pbitpos += TREE_INT_CST_LOW (pos);
2871 else
2872 {
2873 /* Assume here that the offset is a multiple of a unit.
2874 If not, there should be an explicitly added constant. */
2875 if (offset)
2876 offset = size_binop (PLUS_EXPR, offset,
2877 size_binop (FLOOR_DIV_EXPR, pos,
2878 size_int (BITS_PER_UNIT)));
2879 else
2880 offset = size_binop (FLOOR_DIV_EXPR, pos,
2881 size_int (BITS_PER_UNIT));
2882 }
2883	}
2884
2885 else if (TREE_CODE (exp) == ARRAY_REF
2886 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2887 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2888 {
2889 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2890 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2891 }
2892 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2893 && ! ((TREE_CODE (exp) == NOP_EXPR
2894 || TREE_CODE (exp) == CONVERT_EXPR)
2895 && (TYPE_MODE (TREE_TYPE (exp))
2896 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2897 break;
2898
2899 /* If any reference in the chain is volatile, the effect is volatile. */
2900 if (TREE_THIS_VOLATILE (exp))
2901 *pvolatilep = 1;
2902 exp = TREE_OPERAND (exp, 0);
2903 }
2904
2905 /* If this was a bit-field, see if there is a mode that allows direct
2906 access in case EXP is in memory. */
2907 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2908 {
2909 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2910 if (mode == BLKmode)
2911 mode = VOIDmode;
2912 }
2913
2914 *pmode = mode;
2915 *poffset = offset;
2916#if 0
2917 /* We aren't finished fixing the callers to really handle nonzero offset. */
2918 if (offset != 0)
2919 abort ();
2920#endif
2921
2922 return exp;
2923}
2924\f
2925/* Given an rtx VALUE that may contain additions and multiplications,
2926 return an equivalent value that just refers to a register or memory.
2927 This is done by generating instructions to perform the arithmetic
2928 and returning a pseudo-register containing the value.
2929
2930 The returned value may be a REG, SUBREG, MEM or constant. */
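/* For example (a sketch), given a VALUE of the form

	(plus:SI (reg:SI 100) (const_int 4))

   this emits an add insn and returns a pseudo register holding the sum,
   which the caller can then use as an operand or a memory address.  */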
2931
2932rtx
2933force_operand (value, target)
2934 rtx value, target;
2935{
2936 register optab binoptab = 0;
2937 /* Use a temporary to force order of execution of calls to
2938 `force_operand'. */
2939 rtx tmp;
2940 register rtx op2;
2941 /* Use subtarget as the target for operand 0 of a binary operation. */
2942 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2943
2944 if (GET_CODE (value) == PLUS)
2945 binoptab = add_optab;
2946 else if (GET_CODE (value) == MINUS)
2947 binoptab = sub_optab;
2948 else if (GET_CODE (value) == MULT)
2949 {
2950 op2 = XEXP (value, 1);
2951 if (!CONSTANT_P (op2)
2952 && !(GET_CODE (op2) == REG && op2 != subtarget))
2953 subtarget = 0;
2954 tmp = force_operand (XEXP (value, 0), subtarget);
2955 return expand_mult (GET_MODE (value), tmp,
2956			 force_operand (op2, NULL_RTX),
2957 target, 0);
2958 }
2959
2960 if (binoptab)
2961 {
2962 op2 = XEXP (value, 1);
2963 if (!CONSTANT_P (op2)
2964 && !(GET_CODE (op2) == REG && op2 != subtarget))
2965 subtarget = 0;
2966 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2967 {
2968 binoptab = add_optab;
2969 op2 = negate_rtx (GET_MODE (value), op2);
2970 }
2971
2972 /* Check for an addition with OP2 a constant integer and our first
2973 operand a PLUS of a virtual register and something else. In that
2974 case, we want to emit the sum of the virtual register and the
2975 constant first and then add the other value. This allows virtual
2976 register instantiation to simply modify the constant rather than
2977 creating another one around this addition. */
2978 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2979 && GET_CODE (XEXP (value, 0)) == PLUS
2980 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2981 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2982 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2983 {
2984 rtx temp = expand_binop (GET_MODE (value), binoptab,
2985 XEXP (XEXP (value, 0), 0), op2,
2986 subtarget, 0, OPTAB_LIB_WIDEN);
2987 return expand_binop (GET_MODE (value), binoptab, temp,
2988 force_operand (XEXP (XEXP (value, 0), 1), 0),
2989 target, 0, OPTAB_LIB_WIDEN);
2990 }
2991
2992 tmp = force_operand (XEXP (value, 0), subtarget);
2993 return expand_binop (GET_MODE (value), binoptab, tmp,
2994			   force_operand (op2, NULL_RTX),
2995 target, 0, OPTAB_LIB_WIDEN);
2996    /* We give UNSIGNEDP = 0 to expand_binop
2997 because the only operations we are expanding here are signed ones. */
2998 }
2999 return value;
3000}
3001\f
3002/* Subroutine of expand_expr:
3003 save the non-copied parts (LIST) of an expr (LHS), and return a list
3004 which can restore these values to their previous values,
3005 should something modify their storage. */
3006
3007static tree
3008save_noncopied_parts (lhs, list)
3009 tree lhs;
3010 tree list;
3011{
3012 tree tail;
3013 tree parts = 0;
3014
3015 for (tail = list; tail; tail = TREE_CHAIN (tail))
3016 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3017 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3018 else
3019 {
3020 tree part = TREE_VALUE (tail);
3021 tree part_type = TREE_TYPE (part);
3022	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3023 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3024 int_size_in_bytes (part_type), 0);
3025 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3026	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3027	parts = tree_cons (to_be_saved,
3028 build (RTL_EXPR, part_type, NULL_TREE,
3029 (tree) target),
3030 parts);
3031 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3032 }
3033 return parts;
3034}
3035
3036/* Subroutine of expand_expr:
3037 record the non-copied parts (LIST) of an expr (LHS), and return a list
3038 which specifies the initial values of these parts. */
3039
3040static tree
3041init_noncopied_parts (lhs, list)
3042 tree lhs;
3043 tree list;
3044{
3045 tree tail;
3046 tree parts = 0;
3047
3048 for (tail = list; tail; tail = TREE_CHAIN (tail))
3049 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3050 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3051 else
3052 {
3053 tree part = TREE_VALUE (tail);
3054 tree part_type = TREE_TYPE (part);
3055	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3056 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3057 }
3058 return parts;
3059}
3060
3061/* Subroutine of expand_expr: return nonzero iff there is no way that
3062 EXP can reference X, which is being modified. */
3063
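/* For example, if EXP is a CALL_EXPR whose rtl has not been generated yet,
   the call is assumed to clobber all hard registers and all of memory, so
   this returns 0 whenever X is a MEM or a hard register.  */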
3064static int
3065safe_from_p (x, exp)
3066 rtx x;
3067 tree exp;
3068{
3069 rtx exp_rtl = 0;
3070 int i, nops;
3071
3072 if (x == 0)
3073 return 1;
3074
3075 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3076 find the underlying pseudo. */
3077 if (GET_CODE (x) == SUBREG)
3078 {
3079 x = SUBREG_REG (x);
3080 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3081 return 0;
3082 }
3083
3084 /* If X is a location in the outgoing argument area, it is always safe. */
3085 if (GET_CODE (x) == MEM
3086 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3087 || (GET_CODE (XEXP (x, 0)) == PLUS
3088 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3089 return 1;
3090
3091 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3092 {
3093 case 'd':
3094 exp_rtl = DECL_RTL (exp);
3095 break;
3096
3097 case 'c':
3098 return 1;
3099
3100 case 'x':
3101 if (TREE_CODE (exp) == TREE_LIST)
3102 return ((TREE_VALUE (exp) == 0
3103 || safe_from_p (x, TREE_VALUE (exp)))
3104 && (TREE_CHAIN (exp) == 0
3105 || safe_from_p (x, TREE_CHAIN (exp))));
3106 else
3107 return 0;
3108
3109 case '1':
3110 return safe_from_p (x, TREE_OPERAND (exp, 0));
3111
3112 case '2':
3113 case '<':
3114 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3115 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3116
3117 case 'e':
3118 case 'r':
3119 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3120 the expression. If it is set, we conflict iff we are that rtx or
3121 both are in memory. Otherwise, we check all operands of the
3122 expression recursively. */
3123
3124 switch (TREE_CODE (exp))
3125 {
3126 case ADDR_EXPR:
3127 return staticp (TREE_OPERAND (exp, 0));
3128
3129 case INDIRECT_REF:
3130 if (GET_CODE (x) == MEM)
3131 return 0;
3132 break;
3133
3134 case CALL_EXPR:
3135 exp_rtl = CALL_EXPR_RTL (exp);
3136 if (exp_rtl == 0)
3137 {
3138 /* Assume that the call will clobber all hard registers and
3139 all of memory. */
3140 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3141 || GET_CODE (x) == MEM)
3142 return 0;
3143 }
3144
3145 break;
3146
3147 case RTL_EXPR:
3148 exp_rtl = RTL_EXPR_RTL (exp);
3149 if (exp_rtl == 0)
3150 /* We don't know what this can modify. */
3151 return 0;
3152
3153 break;
3154
3155 case WITH_CLEANUP_EXPR:
3156 exp_rtl = RTL_EXPR_RTL (exp);
3157 break;
3158
3159 case SAVE_EXPR:
3160 exp_rtl = SAVE_EXPR_RTL (exp);
3161 break;
3162
3163 case BIND_EXPR:
3164 /* The only operand we look at is operand 1. The rest aren't
3165 part of the expression. */
3166 return safe_from_p (x, TREE_OPERAND (exp, 1));
3167
bbf6f052
RK
3168 case METHOD_CALL_EXPR:
3169 /* This takes a rtx argument, but shouldn't appear here. */
3170 abort ();
3171 }
3172
3173 /* If we have an rtx, we do not need to scan our operands. */
3174 if (exp_rtl)
3175 break;
3176
3177 nops = tree_code_length[(int) TREE_CODE (exp)];
3178 for (i = 0; i < nops; i++)
3179 if (TREE_OPERAND (exp, i) != 0
3180 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3181 return 0;
3182 }
3183
3184 /* If we have an rtl, find any enclosed object. Then see if we conflict
3185 with it. */
3186 if (exp_rtl)
3187 {
3188 if (GET_CODE (exp_rtl) == SUBREG)
3189 {
3190 exp_rtl = SUBREG_REG (exp_rtl);
3191 if (GET_CODE (exp_rtl) == REG
3192 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3193 return 0;
3194 }
3195
3196 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3197 are memory and EXP is not readonly. */
3198 return ! (rtx_equal_p (x, exp_rtl)
3199 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3200 && ! TREE_READONLY (exp)));
3201 }
3202
3203 /* If we reach here, it is safe. */
3204 return 1;
3205}
3206
3207/* Subroutine of expand_expr: return nonzero iff EXP is an
3208 expression whose type is statically determinable. */
3209
3210static int
3211fixed_type_p (exp)
3212 tree exp;
3213{
3214 if (TREE_CODE (exp) == PARM_DECL
3215 || TREE_CODE (exp) == VAR_DECL
3216 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3217 || TREE_CODE (exp) == COMPONENT_REF
3218 || TREE_CODE (exp) == ARRAY_REF)
3219 return 1;
3220 return 0;
3221}
3222\f
3223/* expand_expr: generate code for computing expression EXP.
3224 An rtx for the computed value is returned. The value is never null.
3225 In the case of a void EXP, const0_rtx is returned.
3226
3227 The value may be stored in TARGET if TARGET is nonzero.
3228 TARGET is just a suggestion; callers must assume that
3229 the rtx returned may not be the same as TARGET.
3230
3231 If TARGET is CONST0_RTX, it means that the value will be ignored.
3232
3233 If TMODE is not VOIDmode, it suggests generating the
3234 result in mode TMODE. But this is done only when convenient.
3235   Otherwise, TMODE is ignored and the value is generated in its natural mode.
3236 TMODE is just a suggestion; callers must assume that
3237 the rtx returned may not have mode TMODE.
3238
3239 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3240 with a constant address even if that address is not normally legitimate.
3241 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3242
3243 If MODIFIER is EXPAND_SUM then when EXP is an addition
3244 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3245 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3246 products as above, or REG or MEM, or constant.
3247 Ordinarily in such cases we would output mul or add instructions
3248 and then return a pseudo reg containing the sum.
3249
3250 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3251 it also marks a label as absolutely required (it can't be dead).
3252   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3253   This is used for outputting expressions used in initializers.  */
3254
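/* A sketch of the most common way this function is called in this file:

	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode, and the ordinary
   modifier.  */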
3255rtx
3256expand_expr (exp, target, tmode, modifier)
3257 register tree exp;
3258 rtx target;
3259 enum machine_mode tmode;
3260 enum expand_modifier modifier;
3261{
3262 register rtx op0, op1, temp;
3263 tree type = TREE_TYPE (exp);
3264 int unsignedp = TREE_UNSIGNED (type);
3265 register enum machine_mode mode = TYPE_MODE (type);
3266 register enum tree_code code = TREE_CODE (exp);
3267 optab this_optab;
3268 /* Use subtarget as the target for operand 0 of a binary operation. */
3269 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3270 rtx original_target = target;
3271 int ignore = target == const0_rtx;
3272 tree context;
3273
3274 /* Don't use hard regs as subtargets, because the combiner
3275 can only handle pseudo regs. */
3276 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3277 subtarget = 0;
3278 /* Avoid subtargets inside loops,
3279 since they hide some invariant expressions. */
3280 if (preserve_subexpressions_p ())
3281 subtarget = 0;
3282
3283 if (ignore) target = 0, original_target = 0;
3284
3285 /* If will do cse, generate all results into pseudo registers
3286 since 1) that allows cse to find more things
3287 and 2) otherwise cse could produce an insn the machine
3288 cannot support. */
3289
3290 if (! cse_not_expected && mode != BLKmode && target
3291 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3292 target = subtarget;
3293
3294 /* Ensure we reference a volatile object even if value is ignored. */
3295 if (ignore && TREE_THIS_VOLATILE (exp)
3296 && mode != VOIDmode && mode != BLKmode)
3297 {
3298 target = gen_reg_rtx (mode);
3299 temp = expand_expr (exp, target, VOIDmode, modifier);
3300 if (temp != target)
3301 emit_move_insn (target, temp);
3302 return target;
3303 }
3304
3305 switch (code)
3306 {
3307 case LABEL_DECL:
3308 {
3309 tree function = decl_function_context (exp);
3310 /* Handle using a label in a containing function. */
3311 if (function != current_function_decl && function != 0)
3312 {
3313 struct function *p = find_function_data (function);
3314 /* Allocate in the memory associated with the function
3315 that the label is in. */
3316 push_obstacks (p->function_obstack,
3317 p->function_maybepermanent_obstack);
3318
3319 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3320 label_rtx (exp), p->forced_labels);
3321 pop_obstacks ();
3322 }
3323 else if (modifier == EXPAND_INITIALIZER)
3324 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3325 label_rtx (exp), forced_labels);
3326	temp = gen_rtx (MEM, FUNCTION_MODE,
3327			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3328 if (function != current_function_decl && function != 0)
3329 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3330 return temp;
3331      }
3332
3333 case PARM_DECL:
3334 if (DECL_RTL (exp) == 0)
3335 {
3336 error_with_decl (exp, "prior parameter's size depends on `%s'");
3337	  return CONST0_RTX (mode);
3338 }
3339
3340 case FUNCTION_DECL:
3341 case VAR_DECL:
3342 case RESULT_DECL:
3343 if (DECL_RTL (exp) == 0)
3344 abort ();
3345 /* Ensure variable marked as used
3346 even if it doesn't go through a parser. */
3347 TREE_USED (exp) = 1;
3348 /* Handle variables inherited from containing functions. */
3349 context = decl_function_context (exp);
3350
3351 /* We treat inline_function_decl as an alias for the current function
3352 because that is the inline function whose vars, types, etc.
3353 are being merged into the current function.
3354 See expand_inline_function. */
3355 if (context != 0 && context != current_function_decl
3356 && context != inline_function_decl
3357 /* If var is static, we don't need a static chain to access it. */
3358 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3359 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3360 {
3361 rtx addr;
3362
3363 /* Mark as non-local and addressable. */
3364	  DECL_NONLOCAL (exp) = 1;
3365 mark_addressable (exp);
3366 if (GET_CODE (DECL_RTL (exp)) != MEM)
3367 abort ();
3368 addr = XEXP (DECL_RTL (exp), 0);
3369 if (GET_CODE (addr) == MEM)
3370 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3371 else
3372 addr = fix_lexical_addr (addr, exp);
3373 return change_address (DECL_RTL (exp), mode, addr);
3374 }
3375
3376 /* This is the case of an array whose size is to be determined
3377 from its initializer, while the initializer is still being parsed.
3378 See expand_decl. */
3379 if (GET_CODE (DECL_RTL (exp)) == MEM
3380 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3381 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3382 XEXP (DECL_RTL (exp), 0));
3383 if (GET_CODE (DECL_RTL (exp)) == MEM
3384 && modifier != EXPAND_CONST_ADDRESS
3385 && modifier != EXPAND_SUM
3386 && modifier != EXPAND_INITIALIZER)
3387 {
3388 /* DECL_RTL probably contains a constant address.
3389 On RISC machines where a constant address isn't valid,
3390 make some insns to get that address into a register. */
3391 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3392 || (flag_force_addr
3393 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3394 return change_address (DECL_RTL (exp), VOIDmode,
3395 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3396 }
3397
3398 /* If the mode of DECL_RTL does not match that of the decl, it
3399 must be a promoted value. We return a SUBREG of the wanted mode,
3400 but mark it so that we know that it was already extended. */
3401
3402 if (GET_CODE (DECL_RTL (exp)) == REG
3403 && GET_MODE (DECL_RTL (exp)) != mode)
3404 {
3405 enum machine_mode decl_mode = DECL_MODE (exp);
3406
3407 /* Get the signedness used for this variable. Ensure we get the
3408 same mode we got when the variable was declared. */
3409
3410 PROMOTE_MODE (decl_mode, unsignedp, type);
3411
3412 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3413 abort ();
3414
3415 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3416 SUBREG_PROMOTED_VAR_P (temp) = 1;
3417 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3418 return temp;
3419 }
3420
3421 return DECL_RTL (exp);
3422
3423 case INTEGER_CST:
3424 return immed_double_const (TREE_INT_CST_LOW (exp),
3425 TREE_INT_CST_HIGH (exp),
3426 mode);
3427
3428 case CONST_DECL:
3429 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3430
3431 case REAL_CST:
3432 /* If optimized, generate immediate CONST_DOUBLE
3433 which will be turned into memory by reload if necessary.
3434
3435 We used to force a register so that loop.c could see it. But
3436 this does not allow gen_* patterns to perform optimizations with
3437 the constants. It also produces two insns in cases like "x = 1.0;".
3438 On most machines, floating-point constants are not permitted in
3439 many insns, so we'd end up copying it to a register in any case.
3440
3441 Now, we do the copying in expand_binop, if appropriate. */
3442 return immed_real_const (exp);
3443
3444 case COMPLEX_CST:
3445 case STRING_CST:
3446 if (! TREE_CST_RTL (exp))
3447 output_constant_def (exp);
3448
3449 /* TREE_CST_RTL probably contains a constant address.
3450 On RISC machines where a constant address isn't valid,
3451 make some insns to get that address into a register. */
3452 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3453 && modifier != EXPAND_CONST_ADDRESS
3454 && modifier != EXPAND_INITIALIZER
3455 && modifier != EXPAND_SUM
3456 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3457 return change_address (TREE_CST_RTL (exp), VOIDmode,
3458 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3459 return TREE_CST_RTL (exp);
3460
3461 case SAVE_EXPR:
3462 context = decl_function_context (exp);
3463 /* We treat inline_function_decl as an alias for the current function
3464 because that is the inline function whose vars, types, etc.
3465 are being merged into the current function.
3466 See expand_inline_function. */
3467 if (context == current_function_decl || context == inline_function_decl)
3468 context = 0;
3469
3470 /* If this is non-local, handle it. */
3471 if (context)
3472 {
3473 temp = SAVE_EXPR_RTL (exp);
3474 if (temp && GET_CODE (temp) == REG)
3475 {
3476 put_var_into_stack (exp);
3477 temp = SAVE_EXPR_RTL (exp);
3478 }
3479 if (temp == 0 || GET_CODE (temp) != MEM)
3480 abort ();
3481 return change_address (temp, mode,
3482 fix_lexical_addr (XEXP (temp, 0), exp));
3483 }
3484 if (SAVE_EXPR_RTL (exp) == 0)
3485 {
3486 if (mode == BLKmode)
3487 temp
3488 = assign_stack_temp (mode,
3489 int_size_in_bytes (TREE_TYPE (exp)), 0);
3490 else
3491 {
3492 enum machine_mode var_mode = mode;
3493
3494 if (TREE_CODE (type) == INTEGER_TYPE
3495 || TREE_CODE (type) == ENUMERAL_TYPE
3496 || TREE_CODE (type) == BOOLEAN_TYPE
3497 || TREE_CODE (type) == CHAR_TYPE
3498 || TREE_CODE (type) == REAL_TYPE
3499 || TREE_CODE (type) == POINTER_TYPE
3500 || TREE_CODE (type) == OFFSET_TYPE)
3501 {
3502 PROMOTE_MODE (var_mode, unsignedp, type);
3503 }
3504
3505 temp = gen_reg_rtx (var_mode);
3506 }
3507
3508 SAVE_EXPR_RTL (exp) = temp;
3509 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3510 if (!optimize && GET_CODE (temp) == REG)
3511 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3512 save_expr_regs);
3513 }
3514
3515 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3516 must be a promoted value. We return a SUBREG of the wanted mode,
3517 but mark it so that we know that it was already extended. Note
3518 that `unsignedp' was modified above in this case. */
3519
3520 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3521 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3522 {
3523 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3524 SUBREG_PROMOTED_VAR_P (temp) = 1;
3525 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3526 return temp;
3527 }
3528
3529 return SAVE_EXPR_RTL (exp);
3530
3531 case EXIT_EXPR:
3532 /* Exit the current loop if the body-expression is true. */
3533 {
3534 rtx label = gen_label_rtx ();
3535 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3536 expand_exit_loop (NULL_PTR);
3537 emit_label (label);
3538 }
3539 return const0_rtx;
3540
3541 case LOOP_EXPR:
3542 expand_start_loop (1);
3543 expand_expr_stmt (TREE_OPERAND (exp, 0));
3544 expand_end_loop ();
3545
3546 return const0_rtx;
3547
3548 case BIND_EXPR:
3549 {
3550 tree vars = TREE_OPERAND (exp, 0);
3551 int vars_need_expansion = 0;
3552
3553 /* Need to open a binding contour here because
3554 if there are any cleanups they must be contained here. */
3555 expand_start_bindings (0);
3556
3557 /* Mark the corresponding BLOCK for output in its proper place. */
3558 if (TREE_OPERAND (exp, 2) != 0
3559 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3560 insert_block (TREE_OPERAND (exp, 2));
3561
3562 /* If VARS have not yet been expanded, expand them now. */
3563 while (vars)
3564 {
3565 if (DECL_RTL (vars) == 0)
3566 {
3567 vars_need_expansion = 1;
3568 expand_decl (vars);
3569 }
3570 expand_decl_init (vars);
3571 vars = TREE_CHAIN (vars);
3572 }
3573
3574 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3575
3576 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3577
3578 return temp;
3579 }
3580
3581 case RTL_EXPR:
3582 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3583 abort ();
3584 emit_insns (RTL_EXPR_SEQUENCE (exp));
3585 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3586 return RTL_EXPR_RTL (exp);
3587
3588 case CONSTRUCTOR:
3589 /* All elts simple constants => refer to a constant in memory. But
3590 if this is a non-BLKmode mode, let it store a field at a time
3591 since that should make a CONST_INT or CONST_DOUBLE when we
3592 fold. */
3593 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3594 {
3595 rtx constructor = output_constant_def (exp);
3596 if (modifier != EXPAND_CONST_ADDRESS
3597 && modifier != EXPAND_INITIALIZER
3598 && modifier != EXPAND_SUM
3599 && !memory_address_p (GET_MODE (constructor),
3600 XEXP (constructor, 0)))
3601 constructor = change_address (constructor, VOIDmode,
3602 XEXP (constructor, 0));
3603 return constructor;
3604 }
3605
3606 if (ignore)
3607 {
3608 tree elt;
3609 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3610 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3611 return const0_rtx;
3612 }
3613 else
3614 {
3615 if (target == 0 || ! safe_from_p (target, exp))
3616 {
3617 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3618 target = gen_reg_rtx (mode);
3619 else
3620 {
3621 enum tree_code c = TREE_CODE (type);
3622 target
3623 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3624 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3625 MEM_IN_STRUCT_P (target) = 1;
3626 }
3627 }
3628 store_constructor (exp, target);
3629 return target;
3630 }
3631
3632 case INDIRECT_REF:
3633 {
3634 tree exp1 = TREE_OPERAND (exp, 0);
3635 tree exp2;
3636
3637 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3638 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3639 This code has the same general effect as simply doing
3640 expand_expr on the save expr, except that the expression PTR
3641 is computed for use as a memory address. This means different
3642 code, suitable for indexing, may be generated. */
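/* Editorial example, assuming C-level input: in

	*p += f ();

   the front end wraps P in a SAVE_EXPR so that P is computed only once.
   Expanding that operand with EXPAND_SUM, as below, yields an address
   form (e.g. register + constant) that both the load and the store back
   through *P can reuse. */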
3643 if (TREE_CODE (exp1) == SAVE_EXPR
3644 && SAVE_EXPR_RTL (exp1) == 0
3645 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3646 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3647 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3648 {
3649 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3650 VOIDmode, EXPAND_SUM);
3651 op0 = memory_address (mode, temp);
3652 op0 = copy_all_regs (op0);
3653 SAVE_EXPR_RTL (exp1) = op0;
3654 }
3655 else
3656 {
3657 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3658 op0 = memory_address (mode, op0);
3659 }
3660
3661 temp = gen_rtx (MEM, mode, op0);
3662 /* If address was computed by addition,
3663 mark this as an element of an aggregate. */
3664 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3665 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3666 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3667 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3668 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3669 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3670 || (TREE_CODE (exp1) == ADDR_EXPR
3671 && (exp2 = TREE_OPERAND (exp1, 0))
3672 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3673 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3674 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3675 MEM_IN_STRUCT_P (temp) = 1;
3676 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3677#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3678 a location is accessed through a pointer to const does not mean
3679 that the value there can never change. */
3680 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3681#endif
3682 return temp;
3683 }
3684
3685 case ARRAY_REF:
3686 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3687 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3688 {
3689 /* Nonconstant array index or nonconstant element size.
3690 Generate the tree for *(&array+index) and expand that,
3691 except do it in a language-independent way
3692 and don't complain about non-lvalue arrays.
3693 `mark_addressable' should already have been called
3694 for any array for which this case will be reached. */
3695
3696 /* Don't forget the const or volatile flag from the array element. */
3697 tree variant_type = build_type_variant (type,
3698 TREE_READONLY (exp),
3699 TREE_THIS_VOLATILE (exp));
3700 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3701 TREE_OPERAND (exp, 0));
3702 tree index = TREE_OPERAND (exp, 1);
3703 tree elt;
3704
3705 /* Convert the integer argument to a type the same size as a pointer
3706 so the multiply won't overflow spuriously. */
3707 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3708 index = convert (type_for_size (POINTER_SIZE, 0), index);
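/* Editorial example: with 16-bit `int' and 32-bit pointers, computing
   INDEX * size_in_bytes (TYPE) in the narrow type could wrap; widening
   INDEX to POINTER_SIZE first, as above, keeps the product exact. */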
3709
3710 /* Don't think the address has side effects
3711 just because the array does.
3712 (In some cases the address might have side effects,
3713 and we fail to record that fact here. However, it should not
3714 matter, since expand_expr should not care.) */
3715 TREE_SIDE_EFFECTS (array_adr) = 0;
3716
3717 elt = build1 (INDIRECT_REF, type,
3718 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3719 array_adr,
3720 fold (build (MULT_EXPR,
3721 TYPE_POINTER_TO (variant_type),
3722 index, size_in_bytes (type))))));
3723
3724 /* Volatility, etc., of new expression is same as old expression. */
3725 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3726 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3727 TREE_READONLY (elt) = TREE_READONLY (exp);
3728
3729 return expand_expr (elt, target, tmode, modifier);
3730 }
3731
3732 /* Fold an expression like: "foo"[2].
3733 This is not done in fold so it won't happen inside &. */
3734 {
3735 int i;
3736 tree arg0 = TREE_OPERAND (exp, 0);
3737 tree arg1 = TREE_OPERAND (exp, 1);
3738
3739 if (TREE_CODE (arg0) == STRING_CST
3740 && TREE_CODE (arg1) == INTEGER_CST
3741 && !TREE_INT_CST_HIGH (arg1)
3742 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3743 {
3744 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3745 {
3746 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3747 TREE_TYPE (exp) = integer_type_node;
3748 return expand_expr (exp, target, tmode, modifier);
3749 }
3750 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3751 {
3752 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3753 TREE_TYPE (exp) = integer_type_node;
3754 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3755 }
3756 }
3757 }
3758
3759 /* If this is a constant index into a constant array,
3760 just get the value from the array. Handle both the cases when
3761 we have an explicit constructor and when our operand is a variable
3762 that was declared const. */
3763
3764 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3765 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3766 {
3767 tree index = fold (TREE_OPERAND (exp, 1));
3768 if (TREE_CODE (index) == INTEGER_CST
3769 && TREE_INT_CST_HIGH (index) == 0)
3770 {
3771 int i = TREE_INT_CST_LOW (index);
3772 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3773
3774 while (elem && i--)
3775 elem = TREE_CHAIN (elem);
3776 if (elem)
3777 return expand_expr (fold (TREE_VALUE (elem)), target,
3778 tmode, modifier);
3779 }
3780 }
3781
3782 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3783 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3784 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3785 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3786 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3787 && optimize >= 1
3788 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3789 != ERROR_MARK))
3790 {
3791 tree index = fold (TREE_OPERAND (exp, 1));
3792 if (TREE_CODE (index) == INTEGER_CST
3793 && TREE_INT_CST_HIGH (index) == 0)
3794 {
3795 int i = TREE_INT_CST_LOW (index);
3796 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3797
3798 if (TREE_CODE (init) == CONSTRUCTOR)
3799 {
3800 tree elem = CONSTRUCTOR_ELTS (init);
3801
3802 while (elem && i--)
3803 elem = TREE_CHAIN (elem);
3804 if (elem)
3805 return expand_expr (fold (TREE_VALUE (elem)), target,
3806 tmode, modifier);
3807 }
3808 else if (TREE_CODE (init) == STRING_CST
3809 && i < TREE_STRING_LENGTH (init))
3810 {
3811 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3812 return convert_to_mode (mode, temp, 0);
3813 }
3814 }
3815 }
3816 /* Treat array-ref with constant index as a component-ref. */
3817
3818 case COMPONENT_REF:
3819 case BIT_FIELD_REF:
3820 /* If the operand is a CONSTRUCTOR, we can just extract the
3821 appropriate field if it is present. */
3822 if (code != ARRAY_REF
3823 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3824 {
3825 tree elt;
3826
3827 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3828 elt = TREE_CHAIN (elt))
3829 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3830 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3831 }
3832
3833 {
3834 enum machine_mode mode1;
3835 int bitsize;
3836 int bitpos;
3837 tree offset;
3838 int volatilep = 0;
3839 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3840 &mode1, &unsignedp, &volatilep);
3841
3842 /* In some cases, we will be offsetting OP0's address by a constant.
3843 So get it as a sum, if possible. If we will be using it
3844 directly in an insn, we validate it. */
3845 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3846
3847 /* If this is a constant, put it into a register if it is a
3848 legitimate constant and memory if it isn't. */
3849 if (CONSTANT_P (op0))
3850 {
3851 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3852 if (LEGITIMATE_CONSTANT_P (op0))
3853 op0 = force_reg (mode, op0);
3854 else
3855 op0 = validize_mem (force_const_mem (mode, op0));
3856 }
3857
3858 if (offset != 0)
3859 {
3860 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3861
3862 if (GET_CODE (op0) != MEM)
3863 abort ();
3864 op0 = change_address (op0, VOIDmode,
3865 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3866 force_reg (Pmode, offset_rtx)));
3867 }
3868
3869 /* Don't forget about volatility even if this is a bitfield. */
3870 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3871 {
3872 op0 = copy_rtx (op0);
3873 MEM_VOLATILE_P (op0) = 1;
3874 }
3875
3876 if (mode1 == VOIDmode
3877 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3878 && modifier != EXPAND_CONST_ADDRESS
3879 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3880 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3881 {
3882 /* In cases where an aligned union has an unaligned object
3883 as a field, we might be extracting a BLKmode value from
3884 an integer-mode (e.g., SImode) object. Handle this case
3885 by doing the extract into an object as wide as the field
3886 (which we know to be the width of a basic mode), then
3887 storing into memory, and changing the mode to BLKmode. */
3888 enum machine_mode ext_mode = mode;
3889
3890 if (ext_mode == BLKmode)
3891 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3892
3893 if (ext_mode == BLKmode)
3894 abort ();
3895
3896 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3897 unsignedp, target, ext_mode, ext_mode,
3898 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3899 int_size_in_bytes (TREE_TYPE (tem)));
3900 if (mode == BLKmode)
3901 {
3902 rtx new = assign_stack_temp (ext_mode,
3903 bitsize / BITS_PER_UNIT, 0);
3904
3905 emit_move_insn (new, op0);
3906 op0 = copy_rtx (new);
3907 PUT_MODE (op0, BLKmode);
3908 }
3909
3910 return op0;
3911 }
3912
3913 /* Get a reference to just this component. */
3914 if (modifier == EXPAND_CONST_ADDRESS
3915 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3916 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3917 (bitpos / BITS_PER_UNIT)));
3918 else
3919 op0 = change_address (op0, mode1,
3920 plus_constant (XEXP (op0, 0),
3921 (bitpos / BITS_PER_UNIT)));
3922 MEM_IN_STRUCT_P (op0) = 1;
3923 MEM_VOLATILE_P (op0) |= volatilep;
3924 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3925 return op0;
3926 if (target == 0)
3927 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3928 convert_move (target, op0, unsignedp);
3929 return target;
3930 }
3931
3932 case OFFSET_REF:
3933 {
3934 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3935 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3936 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3937 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3938 MEM_IN_STRUCT_P (temp) = 1;
3939 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3940#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3941 a location is accessed through a pointer to const does not mean
3942 that the value there can never change. */
3943 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3944#endif
3945 return temp;
3946 }
3947
3948 /* Intended for a reference to a buffer of a file-object in Pascal.
3949 But it's not certain that a special tree code will really be
3950 necessary for these. INDIRECT_REF might work for them. */
3951 case BUFFER_REF:
3952 abort ();
3953
3954 /* IN_EXPR: Inlined Pascal set IN expression.
3955
3956 Algorithm:
3957 rlo = set_low - (set_low%bits_per_word);
3958 the_word = set [ (index - rlo)/bits_per_word ];
3959 bit_index = index % bits_per_word;
3960 bitmask = 1 << bit_index;
3961 return !!(the_word & bitmask); */
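/* Editorial sketch, not part of expr.c: the same membership test in
   plain C, assuming the set is stored as an array of bytes and
   BITS_PER_UNIT == 8; all names here are illustrative only. */
#if 0
static int
set_contains (unsigned char *set, int set_low, int index)
{
  int rlo = set_low & ~(8 - 1);		/* set_low - (set_low % 8) */
  unsigned char the_word = set[(index - rlo) / 8];
  int bit_index = index % 8;		/* assumes a nonnegative index */
  return (the_word >> bit_index) & 1;	/* !!(the_word & (1 << bit_index)) */
}
#endif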
3962 case IN_EXPR:
3963 preexpand_calls (exp);
3964 {
3965 tree set = TREE_OPERAND (exp, 0);
3966 tree index = TREE_OPERAND (exp, 1);
3967 tree set_type = TREE_TYPE (set);
3968
3969 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3970 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3971
3972 rtx index_val;
3973 rtx lo_r;
3974 rtx hi_r;
3975 rtx rlow;
3976 rtx diff, quo, rem, addr, bit, result;
3977 rtx setval, setaddr;
3978 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3979
3980 if (target == 0)
3981 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3982
3983 /* If domain is empty, answer is no. */
3984 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3985 return const0_rtx;
3986
3987 index_val = expand_expr (index, 0, VOIDmode, 0);
3988 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3989 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3990 setval = expand_expr (set, 0, VOIDmode, 0);
3991 setaddr = XEXP (setval, 0);
3992
3993 /* Compare index against bounds, if they are constant. */
3994 if (GET_CODE (index_val) == CONST_INT
3995 && GET_CODE (lo_r) == CONST_INT)
3996 {
3997 if (INTVAL (index_val) < INTVAL (lo_r))
3998 return const0_rtx;
3999 }
4000
4001 if (GET_CODE (index_val) == CONST_INT
4002 && GET_CODE (hi_r) == CONST_INT)
4003 {
4004 if (INTVAL (hi_r) < INTVAL (index_val))
4005 return const0_rtx;
4006 }
4007
4008 /* If we get here, we have to generate the code for both cases
4009 (in range and out of range). */
4010
4011 op0 = gen_label_rtx ();
4012 op1 = gen_label_rtx ();
4013
4014 if (! (GET_CODE (index_val) == CONST_INT
4015 && GET_CODE (lo_r) == CONST_INT))
4016 {
4017 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4018 emit_jump_insn (gen_blt (op1));
4019 }
4020
4021 if (! (GET_CODE (index_val) == CONST_INT
4022 && GET_CODE (hi_r) == CONST_INT))
4023 {
4024 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4025 emit_jump_insn (gen_bgt (op1));
4026 }
4027
4028 /* Calculate the element number of bit zero in the first word
4029 of the set. */
4030 if (GET_CODE (lo_r) == CONST_INT)
4031 rlow = gen_rtx (CONST_INT, VOIDmode,
4032 INTVAL (lo_r) & ~ (BITS_PER_UNIT - 1));
4033 else
4034 rlow = expand_binop (index_mode, and_optab,
4035 lo_r, gen_rtx (CONST_INT, VOIDmode,
4036 ~ (BITS_PER_UNIT - 1)),
4037 0, 0, OPTAB_LIB_WIDEN);
4038
4039 diff = expand_binop (index_mode, sub_optab,
4040 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4041
4042 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4043 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4044 0, 0);
4045 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4046 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4047 0, 0);
4048 addr = memory_address (byte_mode,
4049 expand_binop (index_mode, add_optab,
4050 quo, setaddr, NULL_RTX, 0, OPTAB_LIB_WIDEN));
4051 /* Extract the bit we want to examine */
4052 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4053 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4054 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4055 1, OPTAB_LIB_WIDEN);
4056 emit_move_insn (target, result);
4057
4058 /* Output the code to handle the out-of-range case. */
4059 emit_jump (op0);
4060 emit_label (op1);
4061 emit_move_insn (target, const0_rtx);
4062 emit_label (op0);
4063 return target;
4064 }
4065
4066 case WITH_CLEANUP_EXPR:
4067 if (RTL_EXPR_RTL (exp) == 0)
4068 {
4069 RTL_EXPR_RTL (exp)
4070 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4071 cleanups_this_call
4072 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4073 /* That's it for this cleanup. */
4074 TREE_OPERAND (exp, 2) = 0;
4075 }
4076 return RTL_EXPR_RTL (exp);
4077
4078 case CALL_EXPR:
4079 /* Check for a built-in function. */
4080 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4081 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4082 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4083 return expand_builtin (exp, target, subtarget, tmode, ignore);
4084 /* If this call was expanded already by preexpand_calls,
4085 just return the result we got. */
4086 if (CALL_EXPR_RTL (exp) != 0)
4087 return CALL_EXPR_RTL (exp);
4088 return expand_call (exp, target, ignore);
4089
4090 case NON_LVALUE_EXPR:
4091 case NOP_EXPR:
4092 case CONVERT_EXPR:
4093 case REFERENCE_EXPR:
4094 if (TREE_CODE (type) == VOID_TYPE || ignore)
4095 {
4096 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4097 return const0_rtx;
4098 }
4099 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4100 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4101 if (TREE_CODE (type) == UNION_TYPE)
4102 {
4103 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4104 if (target == 0)
4105 {
4106 if (mode == BLKmode)
4107 {
4108 if (TYPE_SIZE (type) == 0
4109 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4110 abort ();
4111 target = assign_stack_temp (BLKmode,
4112 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4113 + BITS_PER_UNIT - 1)
4114 / BITS_PER_UNIT, 0);
4115 }
4116 else
4117 target = gen_reg_rtx (mode);
4118 }
4119 if (GET_CODE (target) == MEM)
4120 /* Store data into beginning of memory target. */
4121 store_expr (TREE_OPERAND (exp, 0),
4122 change_address (target, TYPE_MODE (valtype), 0), 0);
4123
4124 else if (GET_CODE (target) == REG)
4125 /* Store this field into a union of the proper type. */
4126 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4127 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4128 VOIDmode, 0, 1,
4129 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4130 else
4131 abort ();
4132
4133 /* Return the entire union. */
4134 return target;
4135 }
4136 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4137 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
4138 return op0;
4139 if (modifier == EXPAND_INITIALIZER)
4140 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4141 if (flag_force_mem && GET_CODE (op0) == MEM)
4142 op0 = copy_to_reg (op0);
4143
4144 if (target == 0)
4145 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4146 else
4147 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4148 return target;
4149
4150 case PLUS_EXPR:
4151 /* We come here from MINUS_EXPR when the second operand is a constant. */
4152 plus_expr:
4153 this_optab = add_optab;
4154
4155 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4156 something else, make sure we add the register to the constant and
4157 then to the other thing. This case can occur during strength
4158 reduction and doing it this way will produce better code if the
4159 frame pointer or argument pointer is eliminated.
4160
4161 fold-const.c will ensure that the constant is always in the inner
4162 PLUS_EXPR, so the only case we need to do anything about is if
4163 sp, ap, or fp is our second argument, in which case we must swap
4164 the innermost first argument and our second argument. */
4165
4166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4167 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4168 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4169 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4170 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4171 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4172 {
4173 tree t = TREE_OPERAND (exp, 1);
4174
4175 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4176 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4177 }
4178
4179 /* If the result is to be Pmode and we are adding an integer to
4180 something, we might be forming a constant. So try to use
4181 plus_constant. If it produces a sum and we can't accept it,
4182 use force_operand. This allows P = &ARR[const] to generate
4183 efficient code on machines where a SYMBOL_REF is not a valid
4184 address.
4185
4186 If this is an EXPAND_SUM call, always return the sum. */
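/* Editorial example: for

	static int arr[10];
	int *p = &arr[3];

   the sum folds, roughly, to (plus (symbol_ref "arr") (const_int 12))
   with 4-byte ints, so no add instruction is needed even on machines
   where a bare SYMBOL_REF is not a valid address. */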
4187 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4188 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4189 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4190 || mode == Pmode))
4191 {
4192 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4193 EXPAND_SUM);
4194 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4195 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4196 op1 = force_operand (op1, target);
4197 return op1;
4198 }
4199
4200 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4201 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4202 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4203 || mode == Pmode))
4204 {
4205 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4206 EXPAND_SUM);
4207 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4208 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4209 op0 = force_operand (op0, target);
4210 return op0;
4211 }
4212
4213 /* No sense saving up arithmetic to be done
4214 if it's all in the wrong mode to form part of an address.
4215 And force_operand won't know whether to sign-extend or
4216 zero-extend. */
4217 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4218 || mode != Pmode) goto binop;
4219
4220 preexpand_calls (exp);
4221 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4222 subtarget = 0;
4223
4224 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4225 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4226
4227 /* Make sure any term that's a sum with a constant comes last. */
4228 if (GET_CODE (op0) == PLUS
4229 && CONSTANT_P (XEXP (op0, 1)))
4230 {
4231 temp = op0;
4232 op0 = op1;
4233 op1 = temp;
4234 }
4235 /* If adding to a sum including a constant,
4236 associate it to put the constant outside. */
4237 if (GET_CODE (op1) == PLUS
4238 && CONSTANT_P (XEXP (op1, 1)))
4239 {
4240 rtx constant_term = const0_rtx;
4241
4242 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4243 if (temp != 0)
4244 op0 = temp;
4245 /* Ensure that MULT comes first if there is one. */
4246 else if (GET_CODE (op0) == MULT)
4247 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4248 else
4249 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4250
4251 /* Let's also eliminate constants from op0 if possible. */
4252 op0 = eliminate_constant_term (op0, &constant_term);
4253
4254 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4255 their sum should be a constant. Form it into OP1, since the
4256 result we want will then be OP0 + OP1. */
4257
4258 temp = simplify_binary_operation (PLUS, mode, constant_term,
4259 XEXP (op1, 1));
4260 if (temp != 0)
4261 op1 = temp;
4262 else
4263 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4264 }
4265
4266 /* Put a constant term last and put a multiplication first. */
4267 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4268 temp = op1, op1 = op0, op0 = temp;
4269
4270 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4271 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4272
4273 case MINUS_EXPR:
4274 /* Handle difference of two symbolic constants,
4275 for the sake of an initializer. */
4276 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4277 && really_constant_p (TREE_OPERAND (exp, 0))
4278 && really_constant_p (TREE_OPERAND (exp, 1)))
4279 {
4280 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4281 VOIDmode, modifier);
4282 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4283 VOIDmode, modifier);
4284 return gen_rtx (MINUS, mode, op0, op1);
4285 }
4286 /* Convert A - const to A + (-const). */
4287 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4288 {
4289 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4290 fold (build1 (NEGATE_EXPR, type,
4291 TREE_OPERAND (exp, 1))));
4292 goto plus_expr;
4293 }
4294 this_optab = sub_optab;
4295 goto binop;
4296
4297 case MULT_EXPR:
4298 preexpand_calls (exp);
4299 /* If first operand is constant, swap them.
4300 Thus the following special case checks need only
4301 check the second operand. */
4302 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4303 {
4304 register tree t1 = TREE_OPERAND (exp, 0);
4305 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4306 TREE_OPERAND (exp, 1) = t1;
4307 }
4308
4309 /* Attempt to return something suitable for generating an
4310 indexed address, for machines that support that. */
4311
4312 if (modifier == EXPAND_SUM && mode == Pmode
4313 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4314 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4315 {
4316 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4317
4318 /* Apply distributive law if OP0 is x+c. */
4319 if (GET_CODE (op0) == PLUS
4320 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4321 return gen_rtx (PLUS, mode,
4322 gen_rtx (MULT, mode, XEXP (op0, 0),
4323 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4324 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4325 * INTVAL (XEXP (op0, 1))));
4326
4327 if (GET_CODE (op0) != REG)
4328 op0 = force_operand (op0, NULL_RTX);
4329 if (GET_CODE (op0) != REG)
4330 op0 = copy_to_mode_reg (mode, op0);
4331
4332 return gen_rtx (MULT, mode, op0,
4333 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4334 }
4335
4336 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4337 subtarget = 0;
4338
4339 /* Check for multiplying things that have been extended
4340 from a narrower type. If this machine supports multiplying
4341 in that narrower type with a result in the desired type,
4342 do it that way, and avoid the explicit type-conversion. */
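/* Editorial example: with 16-bit `short' and 32-bit `int',

	short a, b;
	int prod = (int) a * (int) b;

   can use a 16x16->32 widening multiply (smul_widen_optab) directly,
   instead of extending both operands and doing a full 32x32 multiply. */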
4343 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4344 && TREE_CODE (type) == INTEGER_TYPE
4345 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4346 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4347 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4348 && int_fits_type_p (TREE_OPERAND (exp, 1),
4349 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4350 /* Don't use a widening multiply if a shift will do. */
4351 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4352 > HOST_BITS_PER_WIDE_INT)
4353 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4354 ||
4355 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4356 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4357 ==
4358 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4359 /* If both operands are extended, they must either both
4360 be zero-extended or both be sign-extended. */
4361 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4362 ==
4363 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4364 {
4365 enum machine_mode innermode
4366 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4367 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4368 ? umul_widen_optab : smul_widen_optab);
4369 if (mode == GET_MODE_WIDER_MODE (innermode)
4370 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4371 {
4372 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4373 NULL_RTX, VOIDmode, 0);
4374 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4375 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4376 VOIDmode, 0);
4377 else
4378 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4379 NULL_RTX, VOIDmode, 0);
4380 goto binop2;
4381 }
4382 }
4383 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4384 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4385 return expand_mult (mode, op0, op1, target, unsignedp);
4386
4387 case TRUNC_DIV_EXPR:
4388 case FLOOR_DIV_EXPR:
4389 case CEIL_DIV_EXPR:
4390 case ROUND_DIV_EXPR:
4391 case EXACT_DIV_EXPR:
4392 preexpand_calls (exp);
4393 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4394 subtarget = 0;
4395 /* Possible optimization: compute the dividend with EXPAND_SUM
4396 then if the divisor is constant can optimize the case
4397 where some terms of the dividend have coeffs divisible by it. */
4398 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4399 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4400 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4401
4402 case RDIV_EXPR:
4403 this_optab = flodiv_optab;
4404 goto binop;
4405
4406 case TRUNC_MOD_EXPR:
4407 case FLOOR_MOD_EXPR:
4408 case CEIL_MOD_EXPR:
4409 case ROUND_MOD_EXPR:
4410 preexpand_calls (exp);
4411 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4412 subtarget = 0;
4413 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4414 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4415 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4416
4417 case FIX_ROUND_EXPR:
4418 case FIX_FLOOR_EXPR:
4419 case FIX_CEIL_EXPR:
4420 abort (); /* Not used for C. */
4421
4422 case FIX_TRUNC_EXPR:
4423 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4424 if (target == 0)
4425 target = gen_reg_rtx (mode);
4426 expand_fix (target, op0, unsignedp);
4427 return target;
4428
4429 case FLOAT_EXPR:
4430 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4431 if (target == 0)
4432 target = gen_reg_rtx (mode);
4433 /* expand_float can't figure out what to do if FROM has VOIDmode.
4434 So give it the correct mode. With -O, cse will optimize this. */
4435 if (GET_MODE (op0) == VOIDmode)
4436 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4437 op0);
4438 expand_float (target, op0,
4439 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4440 return target;
4441
4442 case NEGATE_EXPR:
4443 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4444 temp = expand_unop (mode, neg_optab, op0, target, 0);
4445 if (temp == 0)
4446 abort ();
4447 return temp;
4448
4449 case ABS_EXPR:
4450 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4451
4452 /* Handle complex values specially. */
4453 {
4454 enum machine_mode opmode
4455 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4456
4457 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4458 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4459 return expand_complex_abs (opmode, op0, target, unsignedp);
4460 }
4461
4462 /* Unsigned abs is simply the operand. Testing here means we don't
4463 risk generating incorrect code below. */
4464 if (TREE_UNSIGNED (type))
4465 return op0;
4466
4467 /* First try to do it with a special abs instruction. */
4468 temp = expand_unop (mode, abs_optab, op0, target, 0);
4469 if (temp != 0)
4470 return temp;
4471
4472 /* If this machine has expensive jumps, we can do integer absolute
4473 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4474 where W is the width of MODE. */
4475
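/* Editorial sketch, not part of expr.c: the branch-free identity in
   plain C for a 32-bit int; the shift must be arithmetic, and the
   usual caveat about the most negative value applies. */
#if 0
static int
abs_no_branch (int x)
{
  int ext = x >> 31;		/* all ones if x < 0, else zero */
  return (ext ^ x) - ext;	/* x >= 0: x - 0;  x < 0: ~x + 1 == -x */
}
#endif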
4476 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4477 {
4478 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4479 size_int (GET_MODE_BITSIZE (mode) - 1),
4480 NULL_RTX, 0);
4481
4482 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4483 OPTAB_LIB_WIDEN);
4484 if (temp != 0)
4485 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4486 OPTAB_LIB_WIDEN);
4487
4488 if (temp != 0)
4489 return temp;
4490 }
4491
4492 /* If that does not win, use conditional jump and negate. */
4493 target = original_target;
4494 temp = gen_label_rtx ();
4495 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4496 || (GET_CODE (target) == REG
4497 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4498 target = gen_reg_rtx (mode);
4499 emit_move_insn (target, op0);
4500 emit_cmp_insn (target,
4501 expand_expr (convert (type, integer_zero_node),
4502 NULL_RTX, VOIDmode, 0),
4503 GE, NULL_RTX, mode, 0, 0);
4504 NO_DEFER_POP;
4505 emit_jump_insn (gen_bge (temp));
4506 op0 = expand_unop (mode, neg_optab, target, target, 0);
4507 if (op0 != target)
4508 emit_move_insn (target, op0);
4509 emit_label (temp);
4510 OK_DEFER_POP;
4511 return target;
4512
4513 case MAX_EXPR:
4514 case MIN_EXPR:
4515 target = original_target;
4516 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4517 || (GET_CODE (target) == REG
4518 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4519 target = gen_reg_rtx (mode);
4520 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4521 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4522
4523 /* First try to do it with a special MIN or MAX instruction.
4524 If that does not win, use a conditional jump to select the proper
4525 value. */
4526 this_optab = (TREE_UNSIGNED (type)
4527 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4528 : (code == MIN_EXPR ? smin_optab : smax_optab));
4529
4530 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4531 OPTAB_WIDEN);
4532 if (temp != 0)
4533 return temp;
4534
4535 if (target != op0)
4536 emit_move_insn (target, op0);
4537 op0 = gen_label_rtx ();
4538 if (code == MAX_EXPR)
4539 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4540 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4541 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4542 else
4543 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4544 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4545 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4546 if (temp == const0_rtx)
4547 emit_move_insn (target, op1);
4548 else if (temp != const_true_rtx)
4549 {
4550 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4551 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4552 else
4553 abort ();
4554 emit_move_insn (target, op1);
4555 }
4556 emit_label (op0);
4557 return target;
4558
4559/* ??? Can optimize when the operand of this is a bitwise operation,
4560 by using a different bitwise operation. */
4561 case BIT_NOT_EXPR:
4562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4563 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4564 if (temp == 0)
4565 abort ();
4566 return temp;
4567
4568 case FFS_EXPR:
4569 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4570 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4571 if (temp == 0)
4572 abort ();
4573 return temp;
4574
4575/* ??? Can optimize bitwise operations with one arg constant.
4576 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4577 and (a bitwise1 b) bitwise2 b (etc)
4578 but that is probably not worth while. */
4579
4580/* BIT_AND_EXPR is for bitwise anding.
4581 TRUTH_AND_EXPR is for anding two boolean values
4582 when we want in all cases to compute both of them.
4583 In general it is fastest to do TRUTH_AND_EXPR by
4584 computing both operands as actual zero-or-1 values
4585 and then bitwise anding. In cases where there cannot
4586 be any side effects, better code would be made by
4587 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4588 but the question is how to recognize those cases. */
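/* Editorial example: when A and B are known 0-or-1 values,
   TRUTH_AND_EXPR compiles to a single AND with no branches, giving the
   same result `a && b' would; the short-circuit TRUTH_ANDIF_EXPR form
   instead branches around the evaluation of B. */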
4589
4590 case TRUTH_AND_EXPR:
4591 case BIT_AND_EXPR:
4592 this_optab = and_optab;
4593 goto binop;
4594
4595/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4596 case TRUTH_OR_EXPR:
4597 case BIT_IOR_EXPR:
4598 this_optab = ior_optab;
4599 goto binop;
4600
4601 case BIT_XOR_EXPR:
4602 this_optab = xor_optab;
4603 goto binop;
4604
4605 case LSHIFT_EXPR:
4606 case RSHIFT_EXPR:
4607 case LROTATE_EXPR:
4608 case RROTATE_EXPR:
4609 preexpand_calls (exp);
4610 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4611 subtarget = 0;
4612 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4613 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4614 unsignedp);
4615
4616/* Could determine the answer when only additive constants differ.
4617 Also, the addition of one can be handled by changing the condition. */
4618 case LT_EXPR:
4619 case LE_EXPR:
4620 case GT_EXPR:
4621 case GE_EXPR:
4622 case EQ_EXPR:
4623 case NE_EXPR:
4624 preexpand_calls (exp);
4625 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4626 if (temp != 0)
4627 return temp;
4628 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4629 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4630 && original_target
4631 && GET_CODE (original_target) == REG
4632 && (GET_MODE (original_target)
4633 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4634 {
4635 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4636 if (temp != original_target)
4637 temp = copy_to_reg (temp);
4638 op1 = gen_label_rtx ();
4639 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4640 GET_MODE (temp), unsignedp, 0);
4641 emit_jump_insn (gen_beq (op1));
4642 emit_move_insn (temp, const1_rtx);
4643 emit_label (op1);
4644 return temp;
4645 }
4646 /* If no set-flag instruction, must generate a conditional
4647 store into a temporary variable. Drop through
4648 and handle this like && and ||. */
4649
4650 case TRUTH_ANDIF_EXPR:
4651 case TRUTH_ORIF_EXPR:
4652 if (target == 0 || ! safe_from_p (target, exp)
4653 /* Make sure we don't have a hard reg (such as function's return
4654 value) live across basic blocks, if not optimizing. */
4655 || (!optimize && GET_CODE (target) == REG
4656 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4657 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4658 emit_clr_insn (target);
4659 op1 = gen_label_rtx ();
4660 jumpifnot (exp, op1);
4661 emit_0_to_1_insn (target);
4662 emit_label (op1);
4663 return target;
4664
4665 case TRUTH_NOT_EXPR:
4666 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4667 /* The parser is careful to generate TRUTH_NOT_EXPR
4668 only with operands that are always zero or one. */
4669 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4670 target, 1, OPTAB_LIB_WIDEN);
4671 if (temp == 0)
4672 abort ();
4673 return temp;
4674
4675 case COMPOUND_EXPR:
4676 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4677 emit_queue ();
4678 return expand_expr (TREE_OPERAND (exp, 1),
4679 (ignore ? const0_rtx : target),
4680 VOIDmode, 0);
4681
4682 case COND_EXPR:
4683 {
4684 /* Note that COND_EXPRs whose type is a structure or union
4685 are required to be constructed to contain assignments of
4686 a temporary variable, so that we can evaluate them here
4687 for side effect only. If type is void, we must do likewise. */
4688
4689 /* If an arm of the branch requires a cleanup,
4690 only that cleanup is performed. */
4691
4692 tree singleton = 0;
4693 tree binary_op = 0, unary_op = 0;
4694 tree old_cleanups = cleanups_this_call;
4695 cleanups_this_call = 0;
4696
4697 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4698 convert it to our mode, if necessary. */
4699 if (integer_onep (TREE_OPERAND (exp, 1))
4700 && integer_zerop (TREE_OPERAND (exp, 2))
4701 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4702 {
4703 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4704 if (GET_MODE (op0) == mode)
4705 return op0;
4706 if (target == 0)
4707 target = gen_reg_rtx (mode);
4708 convert_move (target, op0, unsignedp);
4709 return target;
4710 }
4711
4712 /* If we are not to produce a result, we have no target. Otherwise,
4713 if a target was specified use it; it will not be used as an
4714 intermediate target unless it is safe. If no target, use a
4715 temporary. */
4716
4717 if (mode == VOIDmode || ignore)
4718 temp = 0;
4719 else if (original_target
4720 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4721 temp = original_target;
4722 else if (mode == BLKmode)
4723 {
4724 if (TYPE_SIZE (type) == 0
4725 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4726 abort ();
4727 temp = assign_stack_temp (BLKmode,
4728 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4729 + BITS_PER_UNIT - 1)
4730 / BITS_PER_UNIT, 0);
4731 }
4732 else
4733 temp = gen_reg_rtx (mode);
4734
4735 /* Check for X ? A + B : A. If we have this, we can copy
4736 A to the output and conditionally add B. Similarly for unary
4737 operations. Don't do this if X has side-effects because
4738 those side effects might affect A or B and the "?" operation is
4739 a sequence point in ANSI. (We test for side effects later.) */
4740
4741 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4742 && operand_equal_p (TREE_OPERAND (exp, 2),
4743 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4744 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4745 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4746 && operand_equal_p (TREE_OPERAND (exp, 1),
4747 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4748 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4749 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4750 && operand_equal_p (TREE_OPERAND (exp, 2),
4751 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4752 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4753 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4754 && operand_equal_p (TREE_OPERAND (exp, 1),
4755 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4756 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4757
4758 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4759 operation, do this as A + (X != 0). Similarly for other simple
4760 binary operators. */
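/* Editorial example: with a store-flag instruction,

	r = x ? a + 1 : a;

   is emitted as   r = a + (x != 0);   with no branches at all. */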
4761 if (singleton && binary_op
4762 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4763 && (TREE_CODE (binary_op) == PLUS_EXPR
4764 || TREE_CODE (binary_op) == MINUS_EXPR
4765 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4766 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4767 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4768 && integer_onep (TREE_OPERAND (binary_op, 1))
4769 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4770 {
4771 rtx result;
4772 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4773 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4774 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4775 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4776 : and_optab);
4777
4778 /* If we had X ? A : A + 1, do this as A + (X == 0).
4779
4780 We have to invert the truth value here and then put it
4781 back later if do_store_flag fails. We cannot simply copy
4782 TREE_OPERAND (exp, 0) to another variable and modify that
4783 because invert_truthvalue can modify the tree pointed to
4784 by its argument. */
4785 if (singleton == TREE_OPERAND (exp, 1))
4786 TREE_OPERAND (exp, 0)
4787 = invert_truthvalue (TREE_OPERAND (exp, 0));
4788
4789 result = do_store_flag (TREE_OPERAND (exp, 0),
4790 (safe_from_p (temp, singleton)
4791 ? temp : NULL_RTX),
4792 mode, BRANCH_COST <= 1);
4793
4794 if (result)
4795 {
4796 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4797 return expand_binop (mode, boptab, op1, result, temp,
4798 unsignedp, OPTAB_LIB_WIDEN);
4799 }
4800 else if (singleton == TREE_OPERAND (exp, 1))
4801 TREE_OPERAND (exp, 0)
4802 = invert_truthvalue (TREE_OPERAND (exp, 0));
4803 }
4804
4805 NO_DEFER_POP;
4806 op0 = gen_label_rtx ();
4807
4808 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4809 {
4810 if (temp != 0)
4811 {
4812 /* If the target conflicts with the other operand of the
4813 binary op, we can't use it. Also, we can't use the target
4814 if it is a hard register, because evaluating the condition
4815 might clobber it. */
4816 if ((binary_op
4817 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4818 || (GET_CODE (temp) == REG
4819 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4820 temp = gen_reg_rtx (mode);
4821 store_expr (singleton, temp, 0);
4822 }
4823 else
4824 expand_expr (singleton,
4825 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4826 if (cleanups_this_call)
4827 {
4828 sorry ("aggregate value in COND_EXPR");
4829 cleanups_this_call = 0;
4830 }
4831 if (singleton == TREE_OPERAND (exp, 1))
4832 jumpif (TREE_OPERAND (exp, 0), op0);
4833 else
4834 jumpifnot (TREE_OPERAND (exp, 0), op0);
4835
4836 if (binary_op && temp == 0)
4837 /* Just touch the other operand. */
4838 expand_expr (TREE_OPERAND (binary_op, 1),
4839 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4840 else if (binary_op)
4841 store_expr (build (TREE_CODE (binary_op), type,
4842 make_tree (type, temp),
4843 TREE_OPERAND (binary_op, 1)),
4844 temp, 0);
4845 else
4846 store_expr (build1 (TREE_CODE (unary_op), type,
4847 make_tree (type, temp)),
4848 temp, 0);
4849 op1 = op0;
4850 }
4851#if 0
4852 /* This is now done in jump.c and is better done there because it
4853 produces shorter register lifetimes. */
4854
4855 /* Check for both possibilities either constants or variables
4856 in registers (but not the same as the target!). If so, can
4857 save branches by assigning one, branching, and assigning the
4858 other. */
4859 else if (temp && GET_MODE (temp) != BLKmode
4860 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4861 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4862 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4863 && DECL_RTL (TREE_OPERAND (exp, 1))
4864 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4865 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4866 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4867 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4868 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4869 && DECL_RTL (TREE_OPERAND (exp, 2))
4870 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4871 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4872 {
4873 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4874 temp = gen_reg_rtx (mode);
4875 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4876 jumpifnot (TREE_OPERAND (exp, 0), op0);
4877 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4878 op1 = op0;
4879 }
4880#endif
4881 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4882 comparison operator. If we have one of these cases, set the
4883 output to A, branch on A (cse will merge these two references),
4884 then set the output to FOO. */
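/* Editorial example:   r = (a > 0 ? a : b);   is emitted here as

	r = a;  if (a > 0) goto done;  r = b;  done:

   so that cse can merge the two references to A. */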
4885 else if (temp
4886 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4887 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4888 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4889 TREE_OPERAND (exp, 1), 0)
4890 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4891 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4892 {
4893 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4894 temp = gen_reg_rtx (mode);
4895 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4896 jumpif (TREE_OPERAND (exp, 0), op0);
4897 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4898 op1 = op0;
4899 }
4900 else if (temp
4901 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4902 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4903 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4904 TREE_OPERAND (exp, 2), 0)
4905 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4906 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4907 {
4908 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4909 temp = gen_reg_rtx (mode);
4910 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4911 jumpifnot (TREE_OPERAND (exp, 0), op0);
4912 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4913 op1 = op0;
4914 }
4915 else
4916 {
4917 op1 = gen_label_rtx ();
4918 jumpifnot (TREE_OPERAND (exp, 0), op0);
4919 if (temp != 0)
4920 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4921 else
4922 expand_expr (TREE_OPERAND (exp, 1),
4923 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4924 if (cleanups_this_call)
4925 {
4926 sorry ("aggregate value in COND_EXPR");
4927 cleanups_this_call = 0;
4928 }
4929
4930 emit_queue ();
4931 emit_jump_insn (gen_jump (op1));
4932 emit_barrier ();
4933 emit_label (op0);
4934 if (temp != 0)
4935 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4936 else
4937 expand_expr (TREE_OPERAND (exp, 2),
4938 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4939 }
4940
4941 if (cleanups_this_call)
4942 {
4943 sorry ("aggregate value in COND_EXPR");
4944 cleanups_this_call = 0;
4945 }
4946
4947 emit_queue ();
4948 emit_label (op1);
4949 OK_DEFER_POP;
4950 cleanups_this_call = old_cleanups;
4951 return temp;
4952 }
4953
4954 case TARGET_EXPR:
4955 {
4956 /* Something needs to be initialized, but we didn't know
4957 where that thing was when building the tree. For example,
4958 it could be the return value of a function, or a parameter
4959 to a function which is laid out on the stack, or a temporary
4960 variable which must be passed by reference.
4961
4962 We guarantee that the expression will either be constructed
4963 or copied into our original target. */
4964
4965 tree slot = TREE_OPERAND (exp, 0);
4966 tree exp1;
4967
4968 if (TREE_CODE (slot) != VAR_DECL)
4969 abort ();
4970
4971 if (target == 0)
4972 {
4973 if (DECL_RTL (slot) != 0)
4974 {
4975 target = DECL_RTL (slot);
4976 /* If we have already expanded the slot, don't do
4977 it again. (mrs) */
4978 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4979 return target;
4980 }
4981 else
4982 {
4983 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4984 /* All temp slots at this level must not conflict. */
4985 preserve_temp_slots (target);
4986 DECL_RTL (slot) = target;
4987 }
4988
4989#if 0
4990 /* I bet this needs to be done, and I bet that it needs to
4991 be above, inside the else clause. The reason is
4992 simple: how else is it going to get cleaned up? (mrs)
4993
4994 The reason it probably did not work before, and was
4995 commented out, is that this was re-expanding already
4996 expanded target_exprs (target == 0 and DECL_RTL (slot)
4997 != 0), also cleaning them up many times as well. :-( */
4998
4999 /* Since SLOT is not known to the called function
5000 to belong to its stack frame, we must build an explicit
5001 cleanup. This case occurs when we must build up a reference
5002 to pass the reference as an argument. In this case,
5003 it is very likely that such a reference need not be
5004 built here. */
5005
5006 if (TREE_OPERAND (exp, 2) == 0)
5007 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5008 if (TREE_OPERAND (exp, 2))
5009 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5010 cleanups_this_call);
5011#endif
5012 }
5013 else
5014 {
5015 /* This case does occur when expanding a parameter which
5016 needs to be constructed on the stack. The target
5017 is the actual stack address that we want to initialize.
5018 The function we call will perform the cleanup in this case. */
5019
5020 DECL_RTL (slot) = target;
5021 }
5022
5023 exp1 = TREE_OPERAND (exp, 1);
5024 /* Mark it as expanded. */
5025 TREE_OPERAND (exp, 1) = NULL_TREE;
5026
5027 return expand_expr (exp1, target, tmode, modifier);
5028 }
5029
5030 case INIT_EXPR:
5031 {
5032 tree lhs = TREE_OPERAND (exp, 0);
5033 tree rhs = TREE_OPERAND (exp, 1);
5034 tree noncopied_parts = 0;
5035 tree lhs_type = TREE_TYPE (lhs);
5036
5037 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5038 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5039 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5040 TYPE_NONCOPIED_PARTS (lhs_type));
5041 while (noncopied_parts != 0)
5042 {
5043 expand_assignment (TREE_VALUE (noncopied_parts),
5044 TREE_PURPOSE (noncopied_parts), 0, 0);
5045 noncopied_parts = TREE_CHAIN (noncopied_parts);
5046 }
5047 return temp;
5048 }
5049
5050 case MODIFY_EXPR:
5051 {
5052 /* If lhs is complex, expand calls in rhs before computing it.
5053 That's so we don't compute a pointer and save it over a call.
5054 If lhs is simple, compute it first so we can give it as a
5055	 target if the rhs is just a call.  This avoids an extra temp and copy,
5056	 and prevents a partial subsumption that makes bad code.
5057 Actually we could treat component_ref's of vars like vars. */
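 /* E.g. (illustrative): for `x = f ()' with X a simple variable, X's
 rtx is computed first and handed to expand_call as the target, so
 f's value is stored straight into X; for `*p++ = f ()' the call is
 pre-expanded instead, so the address *P++ denotes is not computed
 and then held across the call. */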
5058
5059 tree lhs = TREE_OPERAND (exp, 0);
5060 tree rhs = TREE_OPERAND (exp, 1);
5061 tree noncopied_parts = 0;
5062 tree lhs_type = TREE_TYPE (lhs);
5063
5064 temp = 0;
5065
5066 if (TREE_CODE (lhs) != VAR_DECL
5067 && TREE_CODE (lhs) != RESULT_DECL
5068 && TREE_CODE (lhs) != PARM_DECL)
5069 preexpand_calls (exp);
5070
5071	 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5072	 of size 1.  In this case, (unless we need the result of the
5073 assignment) we can do this more efficiently with a
5074 test followed by an assignment, if necessary.
5075
5076 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5077 things change so we do, this code should be enhanced to
5078 support it. */
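 /* A sketch of the transformation (hypothetical source, not from
 the original): given

	struct { unsigned a : 1, b : 1; } x;

 the statement `x.a |= x.b;' is compiled roughly as

	if (x.b == 0) goto L;  x.a = 1;  L:;

 and `x.a &= x.b;' as

	if (x.b != 0) goto L;  x.a = 0;  L:;  */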
5079 if (ignore
5080 && TREE_CODE (lhs) == COMPONENT_REF
5081 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5082 || TREE_CODE (rhs) == BIT_AND_EXPR)
5083 && TREE_OPERAND (rhs, 0) == lhs
5084 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5085 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5086 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5087 {
5088 rtx label = gen_label_rtx ();
5089
5090 do_jump (TREE_OPERAND (rhs, 1),
5091 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5092 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5093 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5094 (TREE_CODE (rhs) == BIT_IOR_EXPR
5095 ? integer_one_node
5096 : integer_zero_node)),
5097 0, 0);
5098	 do_pending_stack_adjust ();
5099 emit_label (label);
5100 return const0_rtx;
5101 }
5102
5103 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5104 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5105 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5106 TYPE_NONCOPIED_PARTS (lhs_type));
5107
5108 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5109 while (noncopied_parts != 0)
5110 {
5111 expand_assignment (TREE_PURPOSE (noncopied_parts),
5112 TREE_VALUE (noncopied_parts), 0, 0);
5113 noncopied_parts = TREE_CHAIN (noncopied_parts);
5114 }
5115 return temp;
5116 }
5117
5118 case PREINCREMENT_EXPR:
5119 case PREDECREMENT_EXPR:
5120 return expand_increment (exp, 0);
5121
5122 case POSTINCREMENT_EXPR:
5123 case POSTDECREMENT_EXPR:
5124 /* Faster to treat as pre-increment if result is not used. */
5125 return expand_increment (exp, ! ignore);
5126
5127 case ADDR_EXPR:
5128 /* Are we taking the address of a nested function? */
5129 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5130 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5131 {
5132 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5133 op0 = force_operand (op0, target);
5134 }
5135 else
5136 {
5137	 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5138 (modifier == EXPAND_INITIALIZER
5139 ? modifier : EXPAND_CONST_ADDRESS));
5140 if (GET_CODE (op0) != MEM)
5141 abort ();
5142
5143 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5144 return XEXP (op0, 0);
5145 op0 = force_operand (XEXP (op0, 0), target);
5146 }
5147 if (flag_force_addr && GET_CODE (op0) != REG)
5148 return force_reg (Pmode, op0);
5149 return op0;
5150
5151 case ENTRY_VALUE_EXPR:
5152 abort ();
5153
5154 /* COMPLEX type for Extended Pascal & Fortran */
5155 case COMPLEX_EXPR:
5156 {
5157 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5158
5159 rtx prev;
5160
5161 /* Get the rtx code of the operands. */
5162 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5163 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5164
5165 if (! target)
5166 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5167
5168 prev = get_last_insn ();
5169
5170 /* Tell flow that the whole of the destination is being set. */
5171 if (GET_CODE (target) == REG)
5172 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5173
5174 /* Move the real (op0) and imaginary (op1) parts to their location. */
5175	 emit_move_insn (gen_realpart (mode, target), op0);
5176	 emit_move_insn (gen_imagpart (mode, target), op1);
5177
5178 /* Complex construction should appear as a single unit. */
5179 group_insns (prev);
5180
5181 return target;
5182 }
5183
5184 case REALPART_EXPR:
5185	 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5186	 return gen_realpart (mode, op0);
5187
5188	 case IMAGPART_EXPR:
5189	 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5190	 return gen_imagpart (mode, op0);
5191
5192 case CONJ_EXPR:
5193 {
5194 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5195 rtx imag_t;
5196 rtx prev;
5197
5198 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5199
5200 if (! target)
5201 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5202
5203 prev = get_last_insn ();
5204
5205 /* Tell flow that the whole of the destination is being set. */
5206 if (GET_CODE (target) == REG)
5207 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5208
5209 /* Store the realpart and the negated imagpart to target. */
5210	 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5211
5212	 imag_t = gen_imagpart (mode, target);
5213	 temp = expand_unop (mode, neg_optab,
5214	 gen_imagpart (mode, op0), imag_t, 0);
5215 if (temp != imag_t)
5216 emit_move_insn (imag_t, temp);
5217
5218	 /* Conjugate should appear as a single unit.  */
5219 group_insns (prev);
5220
5221 return target;
5222 }
5223
5223
5224 case ERROR_MARK:
5225 return const0_rtx;
5226
5227 default:
5228 return (*lang_expand_expr) (exp, target, tmode, modifier);
5229 }
5230
5231 /* Here to do an ordinary binary operator, generating an instruction
5232 from the optab already placed in `this_optab'. */
5233 binop:
5234 preexpand_calls (exp);
5235 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5236 subtarget = 0;
5237 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5238	 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5239 binop2:
5240 temp = expand_binop (mode, this_optab, op0, op1, target,
5241 unsignedp, OPTAB_LIB_WIDEN);
5242 if (temp == 0)
5243 abort ();
5244 return temp;
5245}
5246\f
5247/* Return the alignment in bits of EXP, a pointer-valued expression.
5248 But don't return more than MAX_ALIGN no matter what.
5249 The alignment returned is, by default, the alignment of the thing that
5250 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5251
5252 Otherwise, look at the expression to see if we can do better, i.e., if the
5253 expression is actually pointing at an object whose alignment is tighter. */
5254
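/* For example (illustrative, not from the original source): for

	double d;
	char *p = (char *) &d;

   the ADDR_EXPR case below lets the value of P be credited with
   DECL_ALIGN (d), which is tighter than the alignment implied by
   its type `char *'. */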
5255static int
5256get_pointer_alignment (exp, max_align)
5257 tree exp;
5258 unsigned max_align;
5259{
5260 unsigned align, inner;
5261
5262 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5263 return 0;
5264
5265 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5266 align = MIN (align, max_align);
5267
5268 while (1)
5269 {
5270 switch (TREE_CODE (exp))
5271 {
5272 case NOP_EXPR:
5273 case CONVERT_EXPR:
5274 case NON_LVALUE_EXPR:
5275 exp = TREE_OPERAND (exp, 0);
5276 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5277 return align;
5278 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5279 inner = MIN (inner, max_align);
5280 align = MAX (align, inner);
5281 break;
5282
5283 case PLUS_EXPR:
5284 /* If sum of pointer + int, restrict our maximum alignment to that
5285 imposed by the integer. If not, we can't do any better than
5286 ALIGN. */
5287 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5288 return align;
5289
5290	 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5291	 & (max_align - 1))
5292	 != 0)
5293 max_align >>= 1;
5294
5295 exp = TREE_OPERAND (exp, 0);
5296 break;
5297
5298 case ADDR_EXPR:
5299 /* See what we are pointing at and look at its alignment. */
5300 exp = TREE_OPERAND (exp, 0);
5301	 if (TREE_CODE (exp) == FUNCTION_DECL)
5302	 align = MAX (align, FUNCTION_BOUNDARY);
5303	 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5304 align = MAX (align, DECL_ALIGN (exp));
5305#ifdef CONSTANT_ALIGNMENT
5306 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5307 align = CONSTANT_ALIGNMENT (exp, align);
5308#endif
5309 return MIN (align, max_align);
5310
5311 default:
5312 return align;
5313 }
5314 }
5315}
5316\f
5317/* Return the tree node and offset if a given argument corresponds to
5318 a string constant. */
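/* For instance (a hypothetical argument, for illustration): for the
   expression `"hello" + 2' this returns the STRING_CST for "hello"
   and sets *PTR_OFFSET to 2; either operand of the PLUS_EXPR may be
   the ADDR_EXPR of the string. */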
5319
5320static tree
5321string_constant (arg, ptr_offset)
5322 tree arg;
5323 tree *ptr_offset;
5324{
5325 STRIP_NOPS (arg);
5326
5327 if (TREE_CODE (arg) == ADDR_EXPR
5328 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5329 {
5330 *ptr_offset = integer_zero_node;
5331 return TREE_OPERAND (arg, 0);
5332 }
5333 else if (TREE_CODE (arg) == PLUS_EXPR)
5334 {
5335 tree arg0 = TREE_OPERAND (arg, 0);
5336 tree arg1 = TREE_OPERAND (arg, 1);
5337
5338 STRIP_NOPS (arg0);
5339 STRIP_NOPS (arg1);
5340
5341 if (TREE_CODE (arg0) == ADDR_EXPR
5342 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5343 {
5344 *ptr_offset = arg1;
5345 return TREE_OPERAND (arg0, 0);
5346 }
5347 else if (TREE_CODE (arg1) == ADDR_EXPR
5348 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5349 {
5350 *ptr_offset = arg0;
5351 return TREE_OPERAND (arg1, 0);
5352 }
5353 }
5354
5355 return 0;
5356}
5357
5358/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5359 way, because it could contain a zero byte in the middle.
5360 TREE_STRING_LENGTH is the size of the character array, not the string.
5361
5362 Unfortunately, string_constant can't access the values of const char
5363 arrays with initializers, so neither can we here. */
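/* For instance (a sketch, not from the original source): c_strlen of
   `"foo\0bar" + i' with a non-constant I returns 0, since the
   internal zero byte defeats the calculation, while c_strlen of
   `"foo\0bar" + 4' returns size_int (3). */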
5364
5365static tree
5366c_strlen (src)
5367 tree src;
5368{
5369 tree offset_node;
5370 int offset, max;
5371 char *ptr;
5372
5373 src = string_constant (src, &offset_node);
5374 if (src == 0)
5375 return 0;
5376 max = TREE_STRING_LENGTH (src);
5377 ptr = TREE_STRING_POINTER (src);
5378 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5379 {
5380 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5381 compute the offset to the following null if we don't know where to
5382 start searching for it. */
5383 int i;
5384 for (i = 0; i < max; i++)
5385 if (ptr[i] == 0)
5386 return 0;
5387 /* We don't know the starting offset, but we do know that the string
5388 has no internal zero bytes. We can assume that the offset falls
5389 within the bounds of the string; otherwise, the programmer deserves
5390 what he gets. Subtract the offset from the length of the string,
5391 and return that. */
5392 /* This would perhaps not be valid if we were dealing with named
5393 arrays in addition to literal string constants. */
5394 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5395 }
5396
5397 /* We have a known offset into the string. Start searching there for
5398 a null character. */
5399 if (offset_node == 0)
5400 offset = 0;
5401 else
5402 {
5403 /* Did we get a long long offset? If so, punt. */
5404 if (TREE_INT_CST_HIGH (offset_node) != 0)
5405 return 0;
5406 offset = TREE_INT_CST_LOW (offset_node);
5407 }
5408 /* If the offset is known to be out of bounds, warn, and call strlen at
5409 runtime. */
5410 if (offset < 0 || offset > max)
5411 {
5412 warning ("offset outside bounds of constant string");
5413 return 0;
5414 }
5415 /* Use strlen to search for the first zero byte. Since any strings
5416 constructed with build_string will have nulls appended, we win even
5417 if we get handed something like (char[4])"abcd".
5418
5419 Since OFFSET is our starting index into the string, no further
5420 calculation is needed. */
5421 return size_int (strlen (ptr + offset));
5422}
5423\f
5424/* Expand an expression EXP that calls a built-in function,
5425 with result going to TARGET if that's convenient
5426 (and in mode MODE if that's convenient).
5427 SUBTARGET may be used as the target for computing one of EXP's operands.
5428 IGNORE is nonzero if the value is to be ignored. */
5429
5430static rtx
5431expand_builtin (exp, target, subtarget, mode, ignore)
5432 tree exp;
5433 rtx target;
5434 rtx subtarget;
5435 enum machine_mode mode;
5436 int ignore;
5437{
5438 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5439 tree arglist = TREE_OPERAND (exp, 1);
5440 rtx op0;
5441	 rtx lab1, insns;
5442	 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5443	 optab builtin_optab;
5444
5445 switch (DECL_FUNCTION_CODE (fndecl))
5446 {
5447 case BUILT_IN_ABS:
5448 case BUILT_IN_LABS:
5449 case BUILT_IN_FABS:
5450 /* build_function_call changes these into ABS_EXPR. */
5451 abort ();
5452
5453	 case BUILT_IN_SIN:
5454	 case BUILT_IN_COS:
5455	 case BUILT_IN_FSQRT:
5456	 /* If not optimizing, call the library function. */
5457	 if (! optimize)
5458	 break;
5459
5460	 if (arglist == 0
5461	 /* Arg could be wrong type if user redeclared this fcn wrong. */
5462	 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5463	 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5464
5465 /* Stabilize and compute the argument. */
5466 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5467 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5468 {
5469 exp = copy_node (exp);
5470 arglist = copy_node (arglist);
5471 TREE_OPERAND (exp, 1) = arglist;
5472 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5473 }
5474	 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5475
5476	 /* Make a suitable register to place result in. */
5477	 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5478
5479	 emit_queue ();
5480	 start_sequence ();
5481
5482 switch (DECL_FUNCTION_CODE (fndecl))
5483 {
5484 case BUILT_IN_SIN:
5485 builtin_optab = sin_optab; break;
5486 case BUILT_IN_COS:
5487 builtin_optab = cos_optab; break;
5488 case BUILT_IN_FSQRT:
5489 builtin_optab = sqrt_optab; break;
5490 default:
5491 abort ();
5492 }
5493
5494 /* Compute into TARGET.
5495	 Set TARGET to wherever the result comes back. */
5496	 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5497	 builtin_optab, op0, target, 0);
5498
5499	 /* If we were unable to expand via the builtin, stop the
5500	 sequence (without outputting the insns) and break, causing
5501	 a call to the library function. */
5502	 if (target == 0)
5503	 {
5504	 end_sequence ();
5505	 break;
5506	 }
5507
5508 /* Check the results by default. But if flag_fast_math is turned on,
5509 then assume sqrt will always be called with valid arguments. */
5510
5511 if (! flag_fast_math)
5512 {
5513	 /* Don't define the builtin FP instructions
5514 if your machine is not IEEE. */
5515 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5516 abort ();
5517
5518 lab1 = gen_label_rtx ();
5519
5520 /* Test the result; if it is NaN, set errno=EDOM because
5521 the argument was not in the domain. */
5522 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5523 emit_jump_insn (gen_beq (lab1));
5524
5525#if TARGET_EDOM
5526 {
5527#ifdef GEN_ERRNO_RTX
5528 rtx errno_rtx = GEN_ERRNO_RTX;
5529#else
5530 rtx errno_rtx
5531 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5532#endif
5533
5534 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5535 }
5536#else
5537 /* We can't set errno=EDOM directly; let the library call do it.
5538 Pop the arguments right away in case the call gets deleted. */
5539 NO_DEFER_POP;
5540 expand_call (exp, target, 0);
5541 OK_DEFER_POP;
5542#endif
5543
5544 emit_label (lab1);
5545 }
5546
5547	 /* Output the entire sequence. */
5548 insns = get_insns ();
5549 end_sequence ();
5550 emit_insns (insns);
5551
5552 return target;
5553
5554 case BUILT_IN_SAVEREGS:
5555 /* Don't do __builtin_saveregs more than once in a function.
5556 Save the result of the first call and reuse it. */
5557 if (saveregs_value != 0)
5558 return saveregs_value;
5559 {
5560 /* When this function is called, it means that registers must be
5561 saved on entry to this function. So we migrate the
5562 call to the first insn of this function. */
5563 rtx temp;
5564 rtx seq;
5565 rtx valreg, saved_valreg;
5566
5567 /* Now really call the function. `expand_call' does not call
5568 expand_builtin, so there is no danger of infinite recursion here. */
5569 start_sequence ();
5570
5571#ifdef EXPAND_BUILTIN_SAVEREGS
5572 /* Do whatever the machine needs done in this case. */
5573 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5574#else
5575 /* The register where the function returns its value
5576 is likely to have something else in it, such as an argument.
5577 So preserve that register around the call. */
5578 if (value_mode != VOIDmode)
5579 {
5580 valreg = hard_libcall_value (value_mode);
5581 saved_valreg = gen_reg_rtx (value_mode);
5582 emit_move_insn (saved_valreg, valreg);
5583 }
5584
5585 /* Generate the call, putting the value in a pseudo. */
5586 temp = expand_call (exp, target, ignore);
5587
5588 if (value_mode != VOIDmode)
5589 emit_move_insn (valreg, saved_valreg);
5590#endif
5591
5592 seq = get_insns ();
5593 end_sequence ();
5594
5595 saveregs_value = temp;
5596
5597 /* This won't work inside a SEQUENCE--it really has to be
5598 at the start of the function. */
5599 if (in_sequence_p ())
5600 {
5601 /* Better to do this than to crash. */
5602 error ("`va_start' used within `({...})'");
5603 return temp;
5604 }
5605
5606 /* Put the sequence after the NOTE that starts the function. */
5607 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5608 return temp;
5609 }
5610
5611 /* __builtin_args_info (N) returns word N of the arg space info
5612 for the current function. The number and meanings of words
5613 are controlled by the definition of CUMULATIVE_ARGS. */
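/* A usage sketch (hypothetical, not from the original source):

	int first = __builtin_args_info (0);

   fetches word 0 of the CUMULATIVE_ARGS record for the current
   function; the operand must be an integer constant within the
   range of words. */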
5614 case BUILT_IN_ARGS_INFO:
5615 {
5616 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5617 int i;
5618 int *word_ptr = (int *) &current_function_args_info;
5619 tree type, elts, result;
5620
5621 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5622 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5623 __FILE__, __LINE__);
5624
5625 if (arglist != 0)
5626 {
5627 tree arg = TREE_VALUE (arglist);
5628 if (TREE_CODE (arg) != INTEGER_CST)
5629 error ("argument of __builtin_args_info must be constant");
5630 else
5631 {
5632 int wordnum = TREE_INT_CST_LOW (arg);
5633
5634 if (wordnum < 0 || wordnum >= nwords)
5635 error ("argument of __builtin_args_info out of range");
5636 else
5637	 return GEN_INT (word_ptr[wordnum]);
5638 }
5639 }
5640 else
5641 error ("missing argument in __builtin_args_info");
5642
5643 return const0_rtx;
5644
5645#if 0
5646 for (i = 0; i < nwords; i++)
5647 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5648
5649 type = build_array_type (integer_type_node,
5650 build_index_type (build_int_2 (nwords, 0)));
5651 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5652 TREE_CONSTANT (result) = 1;
5653 TREE_STATIC (result) = 1;
5654 result = build (INDIRECT_REF, build_pointer_type (type), result);
5655 TREE_CONSTANT (result) = 1;
5656	 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5657#endif
5658 }
5659
5660 /* Return the address of the first anonymous stack arg. */
5661 case BUILT_IN_NEXT_ARG:
5662 {
5663 tree fntype = TREE_TYPE (current_function_decl);
5664 if (!(TYPE_ARG_TYPES (fntype) != 0
5665 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5666 != void_type_node)))
5667 {
5668 error ("`va_start' used in function with fixed args");
5669 return const0_rtx;
5670 }
5671 }
5672
5673 return expand_binop (Pmode, add_optab,
5674 current_function_internal_arg_pointer,
5675 current_function_arg_offset_rtx,
5676	 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5677
5678 case BUILT_IN_CLASSIFY_TYPE:
5679 if (arglist != 0)
5680 {
5681 tree type = TREE_TYPE (TREE_VALUE (arglist));
5682 enum tree_code code = TREE_CODE (type);
5683 if (code == VOID_TYPE)
5684	 return GEN_INT (void_type_class);
5685	 if (code == INTEGER_TYPE)
5686	 return GEN_INT (integer_type_class);
5687	 if (code == CHAR_TYPE)
5688	 return GEN_INT (char_type_class);
5689	 if (code == ENUMERAL_TYPE)
5690	 return GEN_INT (enumeral_type_class);
5691	 if (code == BOOLEAN_TYPE)
5692	 return GEN_INT (boolean_type_class);
5693	 if (code == POINTER_TYPE)
5694	 return GEN_INT (pointer_type_class);
5695	 if (code == REFERENCE_TYPE)
5696	 return GEN_INT (reference_type_class);
5697	 if (code == OFFSET_TYPE)
5698	 return GEN_INT (offset_type_class);
5699	 if (code == REAL_TYPE)
5700	 return GEN_INT (real_type_class);
5701	 if (code == COMPLEX_TYPE)
5702	 return GEN_INT (complex_type_class);
5703	 if (code == FUNCTION_TYPE)
5704	 return GEN_INT (function_type_class);
5705	 if (code == METHOD_TYPE)
5706	 return GEN_INT (method_type_class);
5707	 if (code == RECORD_TYPE)
5708	 return GEN_INT (record_type_class);
5709	 if (code == UNION_TYPE)
5710	 return GEN_INT (union_type_class);
5711	 if (code == ARRAY_TYPE)
5712	 return GEN_INT (array_type_class);
5713	 if (code == STRING_TYPE)
5714	 return GEN_INT (string_type_class);
5715	 if (code == SET_TYPE)
5716	 return GEN_INT (set_type_class);
5717	 if (code == FILE_TYPE)
5718	 return GEN_INT (file_type_class);
5719	 if (code == LANG_TYPE)
5720	 return GEN_INT (lang_type_class);
5721	 }
5722	 return GEN_INT (no_type_class);
5723
5724 case BUILT_IN_CONSTANT_P:
5725 if (arglist == 0)
5726 return const0_rtx;
5727 else
5728	 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5729 ? const1_rtx : const0_rtx);
5730
5731 case BUILT_IN_FRAME_ADDRESS:
5732 /* The argument must be a nonnegative integer constant.
5733 It counts the number of frames to scan up the stack.
5734 The value is the address of that frame. */
5735 case BUILT_IN_RETURN_ADDRESS:
5736 /* The argument must be a nonnegative integer constant.
5737 It counts the number of frames to scan up the stack.
5738 The value is the return address saved in that frame. */
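 /* For example (illustrative): `__builtin_return_address (0)' yields
 the return address of the current function, and
 `__builtin_frame_address (1)' yields the frame address of its
 caller, found by scanning up one dynamic chain link. */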
5739 if (arglist == 0)
5740 /* Warning about missing arg was already issued. */
5741 return const0_rtx;
5742 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5743 {
5744 error ("invalid arg to __builtin_return_address");
5745 return const0_rtx;
5746 }
5747 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5748 {
5749 error ("invalid arg to __builtin_return_address");
5750 return const0_rtx;
5751 }
5752 else
5753 {
5754 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5755 rtx tem = frame_pointer_rtx;
5756 int i;
5757
5758 /* Scan back COUNT frames to the specified frame. */
5759 for (i = 0; i < count; i++)
5760 {
5761 /* Assume the dynamic chain pointer is in the word that
5762 the frame address points to, unless otherwise specified. */
5763#ifdef DYNAMIC_CHAIN_ADDRESS
5764 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5765#endif
5766 tem = memory_address (Pmode, tem);
5767 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5768 }
5769
5770 /* For __builtin_frame_address, return what we've got. */
5771 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5772 return tem;
5773
5774	 /* For __builtin_return_address,
5775	 get the return address from that frame. */
5776#ifdef RETURN_ADDR_RTX
5777 return RETURN_ADDR_RTX (count, tem);
5778#else
5779 tem = memory_address (Pmode,
5780 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5781 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5782#endif
5783 }
5784
5785 case BUILT_IN_ALLOCA:
5786 if (arglist == 0
5787 /* Arg could be non-integer if user redeclared this fcn wrong. */
5788 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5789 return const0_rtx;
5790 current_function_calls_alloca = 1;
5791 /* Compute the argument. */
5792	 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5793
5794 /* Allocate the desired space. */
5795	 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5796
5797 /* Record the new stack level for nonlocal gotos. */
5798	 if (nonlocal_goto_handler_slot != 0)
5799	 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5800 return target;
5801
5802 case BUILT_IN_FFS:
5803 /* If not optimizing, call the library function. */
5804 if (!optimize)
5805 break;
5806
5807 if (arglist == 0
5808 /* Arg could be non-integer if user redeclared this fcn wrong. */
5809 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5810 return const0_rtx;
5811
5812 /* Compute the argument. */
5813 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5814 /* Compute ffs, into TARGET if possible.
5815 Set TARGET to wherever the result comes back. */
5816 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5817 ffs_optab, op0, target, 1);
5818 if (target == 0)
5819 abort ();
5820 return target;
5821
5822 case BUILT_IN_STRLEN:
5823 /* If not optimizing, call the library function. */
5824 if (!optimize)
5825 break;
5826
5827 if (arglist == 0
5828 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5829 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5830 return const0_rtx;
5831 else
5832 {
5833	 tree src = TREE_VALUE (arglist);
5834	 tree len = c_strlen (src);
5835
5836 int align
5837 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5838
5839 rtx result, src_rtx, char_rtx;
5840 enum machine_mode insn_mode = value_mode, char_mode;
5841 enum insn_code icode;
5842
5843 /* If the length is known, just return it. */
5844 if (len != 0)
5845 return expand_expr (len, target, mode, 0);
5846
5847 /* If SRC is not a pointer type, don't do this operation inline. */
5848 if (align == 0)
5849 break;
5850
5851 /* Call a function if we can't compute strlen in the right mode. */
5852
5853 while (insn_mode != VOIDmode)
5854 {
5855 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5856 if (icode != CODE_FOR_nothing)
5857 break;
5858
5859 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5860 }
5861 if (insn_mode == VOIDmode)
5862	 break;
5863
5864 /* Make a place to write the result of the instruction. */
5865 result = target;
5866 if (! (result != 0
5867 && GET_CODE (result) == REG
5868 && GET_MODE (result) == insn_mode
5869 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5870 result = gen_reg_rtx (insn_mode);
5871
5872	 /* Make sure the operands are acceptable to the predicates. */
5873
5874	 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5875 result = gen_reg_rtx (insn_mode);
5876
5877 src_rtx = memory_address (BLKmode,
5878	 expand_expr (src, NULL_RTX, Pmode,
5879	 EXPAND_NORMAL));
5880	 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5881 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5882
5883 char_rtx = const0_rtx;
5884	 char_mode = insn_operand_mode[(int)icode][2];
5885	 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5886 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5887
5888 emit_insn (GEN_FCN (icode) (result,
5889 gen_rtx (MEM, BLKmode, src_rtx),
5890	 char_rtx, GEN_INT (align)));
5891
5892 /* Return the value in the proper mode for this function. */
5893 if (GET_MODE (result) == value_mode)
5894 return result;
5895 else if (target != 0)
5896 {
5897 convert_move (target, result, 0);
5898 return target;
5899 }
5900 else
5901 return convert_to_mode (value_mode, result, 0);
5902 }
5903
5904 case BUILT_IN_STRCPY:
5905 /* If not optimizing, call the library function. */
5906 if (!optimize)
5907 break;
5908
5909 if (arglist == 0
5910 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5911 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5912 || TREE_CHAIN (arglist) == 0
5913 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5914 return const0_rtx;
5915 else
5916 {
5917 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5918
5919 if (len == 0)
5920 break;
5921
5922 len = size_binop (PLUS_EXPR, len, integer_one_node);
5923
5924	 chainon (arglist, build_tree_list (NULL_TREE, len));
5925 }
5926
5927 /* Drops in. */
5928 case BUILT_IN_MEMCPY:
5929 /* If not optimizing, call the library function. */
5930 if (!optimize)
5931 break;
5932
5933 if (arglist == 0
5934 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5935 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5936 || TREE_CHAIN (arglist) == 0
5937 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5938 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5939 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5940 return const0_rtx;
5941 else
5942 {
5943 tree dest = TREE_VALUE (arglist);
5944 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5945 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5946
5947 int src_align
5948 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5949 int dest_align
5950 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5951 rtx dest_rtx;
5952
5953 /* If either SRC or DEST is not a pointer type, don't do
5954 this operation in-line. */
5955 if (src_align == 0 || dest_align == 0)
5956 {
5957 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5958 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5959 break;
5960 }
5961
5962	 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5963
5964 /* Copy word part most expediently. */
5965 emit_block_move (gen_rtx (MEM, BLKmode,
5966 memory_address (BLKmode, dest_rtx)),
5967 gen_rtx (MEM, BLKmode,
5968 memory_address (BLKmode,
5969	 expand_expr (src, NULL_RTX,
5970	 Pmode,
5971	 EXPAND_NORMAL))),
5972	 expand_expr (len, NULL_RTX, VOIDmode, 0),
5973 MIN (src_align, dest_align));
5974 return dest_rtx;
5975 }
5976
5977/* These comparison functions need an instruction that returns an actual
5978 index. An ordinary compare that just sets the condition codes
5979 is not enough. */
5980#ifdef HAVE_cmpstrsi
5981 case BUILT_IN_STRCMP:
5982 /* If not optimizing, call the library function. */
5983 if (!optimize)
5984 break;
5985
5986 if (arglist == 0
5987 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5988 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5989 || TREE_CHAIN (arglist) == 0
5990 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5991 return const0_rtx;
5992 else if (!HAVE_cmpstrsi)
5993 break;
5994 {
5995 tree arg1 = TREE_VALUE (arglist);
5996 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5997 tree offset;
5998 tree len, len2;
5999
6000 len = c_strlen (arg1);
6001 if (len)
6002 len = size_binop (PLUS_EXPR, integer_one_node, len);
6003 len2 = c_strlen (arg2);
6004 if (len2)
6005 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6006
6007 /* If we don't have a constant length for the first, use the length
6008 of the second, if we know it. We don't require a constant for
6009 this case; some cost analysis could be done if both are available
6010 but neither is constant. For now, assume they're equally cheap.
6011
6012 If both strings have constant lengths, use the smaller. This
6013 could arise if optimization results in strcpy being called with
6014 two fixed strings, or if the code was machine-generated. We should
6015 add some code to the `memcmp' handler below to deal with such
6016 situations, someday. */
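 /* E.g. (hypothetical): for `strcmp (s, "abc")' LEN2 is 4, the string
 length plus one for the terminating null; since LEN is unknown,
 that constant is the length handed on to the cmpstrsi insn. */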
6017 if (!len || TREE_CODE (len) != INTEGER_CST)
6018 {
6019 if (len2)
6020 len = len2;
6021 else if (len == 0)
6022 break;
6023 }
6024 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6025 {
6026 if (tree_int_cst_lt (len2, len))
6027 len = len2;
6028 }
6029
6030	 chainon (arglist, build_tree_list (NULL_TREE, len));
6031 }
6032
6033 /* Drops in. */
6034 case BUILT_IN_MEMCMP:
6035 /* If not optimizing, call the library function. */
6036 if (!optimize)
6037 break;
6038
6039 if (arglist == 0
6040 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6041 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6042 || TREE_CHAIN (arglist) == 0
6043 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6044 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6045 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6046 return const0_rtx;
6047 else if (!HAVE_cmpstrsi)
6048 break;
6049 {
6050 tree arg1 = TREE_VALUE (arglist);
6051 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6052 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6053 rtx result;
6054
6055 int arg1_align
6056 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6057 int arg2_align
6058 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6059 enum machine_mode insn_mode
6060 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6061
6062 /* If we don't have POINTER_TYPE, call the function. */
6063 if (arg1_align == 0 || arg2_align == 0)
6064 {
6065 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6066 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6067 break;
6068 }
6069
6070 /* Make a place to write the result of the instruction. */
6071 result = target;
6072 if (! (result != 0
6073 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6074 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6075 result = gen_reg_rtx (insn_mode);
6076
6077 emit_insn (gen_cmpstrsi (result,
6078 gen_rtx (MEM, BLKmode,
6079	 expand_expr (arg1, NULL_RTX, Pmode,
6080	 EXPAND_NORMAL)),
6081	 gen_rtx (MEM, BLKmode,
6082	 expand_expr (arg2, NULL_RTX, Pmode,
6083	 EXPAND_NORMAL)),
6084	 expand_expr (len, NULL_RTX, VOIDmode, 0),
6085	 GEN_INT (MIN (arg1_align, arg2_align))));
6086
6087 /* Return the value in the proper mode for this function. */
6088 mode = TYPE_MODE (TREE_TYPE (exp));
6089 if (GET_MODE (result) == mode)
6090 return result;
6091 else if (target != 0)
6092 {
6093 convert_move (target, result, 0);
6094 return target;
6095 }
6096 else
6097 return convert_to_mode (mode, result, 0);
6098 }
6099#else
6100 case BUILT_IN_STRCMP:
6101 case BUILT_IN_MEMCMP:
6102 break;
6103#endif
6104
6105 default: /* just do library call, if unknown builtin */
6106 error ("built-in function %s not currently supported",
6107 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6108 }
6109
6110 /* The switch statement above can drop through to cause the function
6111 to be called normally. */
6112
6113 return expand_call (exp, target, ignore);
6114}
6115\f
6116/* Expand code for a post- or pre- increment or decrement
6117 and return the RTX for the result.
6118 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6119
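/* For example (a sketch): for `i++' used for its value, POST is 1 and
   the old value of I is returned; for `++i', or for an `i++' whose
   result is ignored, POST is 0 and the incremented value is
   returned. */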
6120static rtx
6121expand_increment (exp, post)
6122 register tree exp;
6123 int post;
6124{
6125 register rtx op0, op1;
6126 register rtx temp, value;
6127 register tree incremented = TREE_OPERAND (exp, 0);
6128 optab this_optab = add_optab;
6129 int icode;
6130 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6131 int op0_is_copy = 0;
6132
6133 /* Stabilize any component ref that might need to be
6134 evaluated more than once below. */
6135 if (TREE_CODE (incremented) == BIT_FIELD_REF
6136 || (TREE_CODE (incremented) == COMPONENT_REF
6137 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6138 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6139 incremented = stabilize_reference (incremented);
6140
6141 /* Compute the operands as RTX.
6142 Note whether OP0 is the actual lvalue or a copy of it:
6143	 I believe it is a copy iff it is a register or subreg
6144	 and insns were generated in computing it. */
6145
6146	 temp = get_last_insn ();
6147	 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6148
6149 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6150	 in place but instead must do sign- or zero-extension during assignment,
6151 so we copy it into a new register and let the code below use it as
6152 a copy.
6153
6154	 Note that we can safely modify this SUBREG since it is known not to be
6155 shared (it was made by the expand_expr call above). */
6156
6157 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6158 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6159
6160	 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6161	 && temp != get_last_insn ());
6162	 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6163
6164 /* Decide whether incrementing or decrementing. */
6165 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6166 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6167 this_optab = sub_optab;
6168
6169 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6170 then we cannot just increment OP0. We must
6171 therefore contrive to increment the original value.
6172 Then we can return OP0 since it is a copy of the old value. */
6173 if (op0_is_copy)
6174 {
6175 /* This is the easiest way to increment the value wherever it is.
6176 Problems with multiple evaluation of INCREMENTED
6177 are prevented because either (1) it is a component_ref,
6178 in which case it was stabilized above, or (2) it is an array_ref
6179 with constant index in an array in a register, which is
6180 safe to reevaluate. */
6181 tree newexp = build ((this_optab == add_optab
6182 ? PLUS_EXPR : MINUS_EXPR),
6183 TREE_TYPE (exp),
6184 incremented,
6185 TREE_OPERAND (exp, 1));
6186 temp = expand_assignment (incremented, newexp, ! post, 0);
6187 return post ? op0 : temp;
6188 }
6189
6190 /* Convert decrement by a constant into a negative increment. */
6191 if (this_optab == sub_optab
6192 && GET_CODE (op1) == CONST_INT)
6193 {
6194	 op1 = GEN_INT (- INTVAL (op1));
6195 this_optab = add_optab;
6196 }
6197
6198 if (post)
6199 {
6200 /* We have a true reference to the value in OP0.
6201 If there is an insn to add or subtract in this mode, queue it. */
6202
6203#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6204 op0 = stabilize (op0);
6205#endif
6206
6207 icode = (int) this_optab->handlers[(int) mode].insn_code;
6208 if (icode != (int) CODE_FOR_nothing
6209 /* Make sure that OP0 is valid for operands 0 and 1
6210 of the insn we want to queue. */
6211 && (*insn_operand_predicate[icode][0]) (op0, mode)
6212 && (*insn_operand_predicate[icode][1]) (op0, mode))
6213 {
6214 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6215 op1 = force_reg (mode, op1);
6216
6217 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6218 }
6219 }
6220
6221 /* Preincrement, or we can't increment with one simple insn. */
6222 if (post)
6223 /* Save a copy of the value before inc or dec, to return it later. */
6224 temp = value = copy_to_reg (op0);
6225 else
6226 /* Arrange to return the incremented value. */
6227 /* Copy the rtx because expand_binop will protect from the queue,
6228 and the results of that would be invalid for us to return
6229 if our caller does emit_queue before using our result. */
6230 temp = copy_rtx (value = op0);
6231
6232 /* Increment however we can. */
6233 op1 = expand_binop (mode, this_optab, value, op1, op0,
6234 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6235 /* Make sure the value is stored into OP0. */
6236 if (op1 != op0)
6237 emit_move_insn (op0, op1);
6238
6239 return temp;
6240}
6241\f
6242/* Expand all function calls contained within EXP, innermost ones first.
6243 But don't look within expressions that have sequence points.
6244 For each CALL_EXPR, record the rtx for its value
6245 in the CALL_EXPR_RTL field. */
6246
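/* For example (illustrative): in `a[i] = f (x) + g (y)' both calls
   are expanded here first, so no partially computed address is held
   across either call; but inside `p && f (x)' the call is not
   pre-expanded, since && introduces a sequence point. */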
6247static void
6248preexpand_calls (exp)
6249 tree exp;
6250{
6251 register int nops, i;
6252 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6253
6254 if (! do_preexpand_calls)
6255 return;
6256
6257 /* Only expressions and references can contain calls. */
6258
6259 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6260 return;
6261
6262 switch (TREE_CODE (exp))
6263 {
6264 case CALL_EXPR:
6265 /* Do nothing if already expanded. */
6266 if (CALL_EXPR_RTL (exp) != 0)
6267 return;
6268
6269 /* Do nothing to built-in functions. */
6270 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6271 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6272 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6273	 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6274 return;
6275
6276 case COMPOUND_EXPR:
6277 case COND_EXPR:
6278 case TRUTH_ANDIF_EXPR:
6279 case TRUTH_ORIF_EXPR:
6280 /* If we find one of these, then we can be sure
6281 the adjust will be done for it (since it makes jumps).
6282 Do it now, so that if this is inside an argument
6283 of a function, we don't get the stack adjustment
6284 after some other args have already been pushed. */
6285 do_pending_stack_adjust ();
6286 return;
6287
6288 case BLOCK:
6289 case RTL_EXPR:
6290 case WITH_CLEANUP_EXPR:
6291 return;
6292
6293 case SAVE_EXPR:
6294 if (SAVE_EXPR_RTL (exp) != 0)
6295 return;
6296 }
6297
6298 nops = tree_code_length[(int) TREE_CODE (exp)];
6299 for (i = 0; i < nops; i++)
6300 if (TREE_OPERAND (exp, i) != 0)
6301 {
6302 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6303 if (type == 'e' || type == '<' || type == '1' || type == '2'
6304 || type == 'r')
6305 preexpand_calls (TREE_OPERAND (exp, i));
6306 }
6307}
6308\f
6309/* At the start of a function, record that we have no previously-pushed
6310 arguments waiting to be popped. */
6311
6312void
6313init_pending_stack_adjust ()
6314{
6315 pending_stack_adjust = 0;
6316}
6317
6318/* When exiting from function, if safe, clear out any pending stack adjust
6319 so the adjustment won't get done. */
6320
6321void
6322clear_pending_stack_adjust ()
6323{
6324#ifdef EXIT_IGNORE_STACK
6325 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6326	 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6327 && ! flag_inline_functions)
6328 pending_stack_adjust = 0;
6329#endif
6330}
6331
6332/* Pop any previously-pushed arguments that have not been popped yet. */
6333
6334void
6335do_pending_stack_adjust ()
6336{
6337 if (inhibit_defer_pop == 0)
6338 {
6339 if (pending_stack_adjust != 0)
6340	 adjust_stack (GEN_INT (pending_stack_adjust));
6341 pending_stack_adjust = 0;
6342 }
6343}
6344
6345/* Expand all cleanups up to OLD_CLEANUPS.
6346 Needed here, and also for language-dependent calls. */
6347
6348void
6349expand_cleanups_to (old_cleanups)
6350 tree old_cleanups;
6351{
6352 while (cleanups_this_call != old_cleanups)
6353 {
6354	 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6355 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6356 }
6357}
6358\f
6359/* Expand conditional expressions. */
6360
6361/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6362 LABEL is an rtx of code CODE_LABEL, in this function and all the
6363 functions here. */
6364
6365void
6366jumpifnot (exp, label)
6367 tree exp;
6368 rtx label;
6369{
6370	 do_jump (exp, label, NULL_RTX);
6371}
6372
6373/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6374
6375void
6376jumpif (exp, label)
6377 tree exp;
6378 rtx label;
6379{
6380	 do_jump (exp, NULL_RTX, label);
6381}
6382
6383/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6384 the result is zero, or IF_TRUE_LABEL if the result is one.
6385 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6386 meaning fall through in that case.
6387
6388	 do_jump always does any pending stack adjust except when it does not
6389	 actually perform a jump.  An example where there is no jump
6390	 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6391
6392 This function is responsible for optimizing cases such as
6393 &&, || and comparison operators in EXP. */
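/* For instance (a sketch): for `if (a && b)', jumpifnot hands the
   TRUTH_ANDIF_EXPR to do_jump, which tests A and B with separate
   conditional jumps, so the boolean value of `a && b' is never
   materialized in a register. */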
6394
6395void
6396do_jump (exp, if_false_label, if_true_label)
6397 tree exp;
6398 rtx if_false_label, if_true_label;
6399{
6400 register enum tree_code code = TREE_CODE (exp);
6401 /* Some cases need to create a label to jump to
6402 in order to properly fall through.
6403 These cases set DROP_THROUGH_LABEL nonzero. */
6404 rtx drop_through_label = 0;
6405 rtx temp;
6406 rtx comparison = 0;
6407 int i;
6408 tree type;
6409
6410 emit_queue ();
6411
6412 switch (code)
6413 {
6414 case ERROR_MARK:
6415 break;
6416
6417 case INTEGER_CST:
6418 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6419 if (temp)
6420 emit_jump (temp);
6421 break;
6422
6423#if 0
6424 /* This is not true with #pragma weak */
6425 case ADDR_EXPR:
6426 /* The address of something can never be zero. */
6427 if (if_true_label)
6428 emit_jump (if_true_label);
6429 break;
6430#endif
6431
6432 case NOP_EXPR:
6433 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6434 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6435 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6436 goto normal;
6437 case CONVERT_EXPR:
6438 /* If we are narrowing the operand, we have to do the compare in the
6439 narrower mode. */
6440 if ((TYPE_PRECISION (TREE_TYPE (exp))
6441 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6442 goto normal;
6443 case NON_LVALUE_EXPR:
6444 case REFERENCE_EXPR:
6445 case ABS_EXPR:
6446 case NEGATE_EXPR:
6447 case LROTATE_EXPR:
6448 case RROTATE_EXPR:
6449 /* These cannot change zero->non-zero or vice versa. */
6450 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6451 break;
6452
6453#if 0
6454 /* This is never less insns than evaluating the PLUS_EXPR followed by
6455 a test and can be longer if the test is eliminated. */
6456 case PLUS_EXPR:
6457 /* Reduce to minus. */
6458 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6459 TREE_OPERAND (exp, 0),
6460 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6461 TREE_OPERAND (exp, 1))));
6462 /* Process as MINUS. */
6463#endif
6464
6465 case MINUS_EXPR:
6466 /* Non-zero iff operands of minus differ. */
6467 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6468 TREE_OPERAND (exp, 0),
6469 TREE_OPERAND (exp, 1)),
6470 NE, NE);
6471 break;
6472
6473 case BIT_AND_EXPR:
6474 /* If we are AND'ing with a small constant, do this comparison in the
6475 smallest type that fits. If the machine doesn't have comparisons
6476 that small, it will be converted back to the wider comparison.
6477 This helps if we are testing the sign bit of a narrower object.
6478 combine can't do this for us because it can't know whether a
6479 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
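 /* For example (illustrative): for `if (c & 0x80)' with C of type
 int, I is 7, so TYPE is an 8-bit unsigned type; if the machine has
 a QImode compare, the AND and the test are done in that narrower
 mode, where mask 0x80 is just the sign bit. */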
6480
6481	 if (! SLOW_BYTE_ACCESS
6482	 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6483	 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6484 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6485 && (type = type_for_size (i + 1, 1)) != 0
6486	 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6487	 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6488	 != CODE_FOR_nothing))
6489 {
6490 do_jump (convert (type, exp), if_false_label, if_true_label);
6491 break;
6492 }
6493 goto normal;
6494
6495 case TRUTH_NOT_EXPR:
6496 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6497 break;
6498
6499 case TRUTH_ANDIF_EXPR:
6500 if (if_false_label == 0)
6501 if_false_label = drop_through_label = gen_label_rtx ();
6502	 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6503 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6504 break;
6505
6506 case TRUTH_ORIF_EXPR:
6507 if (if_true_label == 0)
6508 if_true_label = drop_through_label = gen_label_rtx ();
6509	 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6510 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6511 break;
6512
6513 case COMPOUND_EXPR:
6514 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6515 free_temp_slots ();
6516 emit_queue ();
6517	 do_pending_stack_adjust ();
6518 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6519 break;
6520
6521 case COMPONENT_REF:
6522 case BIT_FIELD_REF:
6523 case ARRAY_REF:
6524 {
6525 int bitsize, bitpos, unsignedp;
6526 enum machine_mode mode;
6527 tree type;
6528	 tree offset;
6529 int volatilep = 0;
6530
6531 /* Get description of this reference. We don't actually care
6532 about the underlying object here. */
6533	 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6534	 &mode, &unsignedp, &volatilep);
6535
6536 type = type_for_size (bitsize, unsignedp);
6537	 if (! SLOW_BYTE_ACCESS
6538	 && type != 0 && bitsize >= 0
6539	 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6540	 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6541	 != CODE_FOR_nothing))
6542 {
6543 do_jump (convert (type, exp), if_false_label, if_true_label);
6544 break;
6545 }
6546 goto normal;
6547 }
6548
6549 case COND_EXPR:
6550 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6551 if (integer_onep (TREE_OPERAND (exp, 1))
6552 && integer_zerop (TREE_OPERAND (exp, 2)))
6553 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6554
6555 else if (integer_zerop (TREE_OPERAND (exp, 1))
6556 && integer_onep (TREE_OPERAND (exp, 2)))
6557 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6558
6559 else
6560 {
6561 register rtx label1 = gen_label_rtx ();
6562 drop_through_label = gen_label_rtx ();
6563	 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6564 /* Now the THEN-expression. */
6565 do_jump (TREE_OPERAND (exp, 1),
6566 if_false_label ? if_false_label : drop_through_label,
6567 if_true_label ? if_true_label : drop_through_label);
6568	 /* In case the do_jump just above never jumps. */
6569	 do_pending_stack_adjust ();
6570 emit_label (label1);
6571 /* Now the ELSE-expression. */
6572 do_jump (TREE_OPERAND (exp, 2),
6573 if_false_label ? if_false_label : drop_through_label,
6574 if_true_label ? if_true_label : drop_through_label);
6575 }
6576 break;
6577
6578 case EQ_EXPR:
6579 if (integer_zerop (TREE_OPERAND (exp, 1)))
6580 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6581 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6582 == MODE_INT)
6583 &&
6584 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6585 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6586 else
6587 comparison = compare (exp, EQ, EQ);
6588 break;
6589
6590 case NE_EXPR:
6591 if (integer_zerop (TREE_OPERAND (exp, 1)))
6592 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6593 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6594 == MODE_INT)
6595 &&
6596 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6597 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6598 else
6599 comparison = compare (exp, NE, NE);
6600 break;
6601
6602 case LT_EXPR:
6603 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6604 == MODE_INT)
6605 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6606 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6607 else
6608 comparison = compare (exp, LT, LTU);
6609 break;
6610
6611 case LE_EXPR:
6612 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6613 == MODE_INT)
6614 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6615 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6616 else
6617 comparison = compare (exp, LE, LEU);
6618 break;
6619
6620 case GT_EXPR:
6621 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6622 == MODE_INT)
6623 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6624 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6625 else
6626 comparison = compare (exp, GT, GTU);
6627 break;
6628
6629 case GE_EXPR:
6630 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6631 == MODE_INT)
6632 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6633 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6634 else
6635 comparison = compare (exp, GE, GEU);
6636 break;
6637
6638 default:
6639 normal:
6640	 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6641#if 0
6642 /* This is not needed any more and causes poor code since it causes
6643 comparisons and tests from non-SI objects to have different code
6644 sequences. */
6645 /* Copy to register to avoid generating bad insns by cse
6646 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6647 if (!cse_not_expected && GET_CODE (temp) == MEM)
6648 temp = copy_to_reg (temp);
6649#endif
6650 do_pending_stack_adjust ();
6651 if (GET_CODE (temp) == CONST_INT)
6652 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6653 else if (GET_CODE (temp) == LABEL_REF)
6654 comparison = const_true_rtx;
6655 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6656 && !can_compare_p (GET_MODE (temp)))
6657 /* Note swapping the labels gives us not-equal. */
6658 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6659 else if (GET_MODE (temp) != VOIDmode)
6660 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6661	 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6662	 GET_MODE (temp), NULL_RTX, 0);
6663 else
6664 abort ();
6665 }
6666
6667 /* Do any postincrements in the expression that was tested. */
6668 emit_queue ();
6669
6670 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6671 straight into a conditional jump instruction as the jump condition.
6672 Otherwise, all the work has been done already. */
6673
6674 if (comparison == const_true_rtx)
6675 {
6676 if (if_true_label)
6677 emit_jump (if_true_label);
6678 }
6679 else if (comparison == const0_rtx)
6680 {
6681 if (if_false_label)
6682 emit_jump (if_false_label);
6683 }
6684 else if (comparison)
6685 do_jump_for_compare (comparison, if_false_label, if_true_label);
6686
6687 free_temp_slots ();
6688
6689 if (drop_through_label)
6690 {
6691 /* If do_jump produces code that might be jumped around,
6692 do any stack adjusts from that code, before the place
6693 where control merges in. */
6694 do_pending_stack_adjust ();
6695 emit_label (drop_through_label);
6696 }
6697}
6698\f
6699/* Given a comparison expression EXP for values too wide to be compared
6700 with one insn, test the comparison and jump to the appropriate label.
6701 The code of EXP is ignored; we always test GT if SWAP is 0,
6702 and LT if SWAP is 1. */
6703
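/* A sketch of the code emitted (illustrative) for a two-word GT
   comparison of A and B, high-order word first:

	if (a.high > b.high) goto if_true;
	if (a.high != b.high) goto if_false;
	if (a.low > b.low) goto if_true;   (unsigned compare)
	goto if_false;  */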
6704static void
6705do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6706 tree exp;
6707 int swap;
6708 rtx if_false_label, if_true_label;
6709{
906c4e36
RK
6710 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6711 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6712 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6713 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6714 rtx drop_through_label = 0;
6715 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6716 int i;
6717
6718 if (! if_true_label || ! if_false_label)
6719 drop_through_label = gen_label_rtx ();
6720 if (! if_true_label)
6721 if_true_label = drop_through_label;
6722 if (! if_false_label)
6723 if_false_label = drop_through_label;
6724
6725 /* Compare a word at a time, high order first. */
6726 for (i = 0; i < nwords; i++)
6727 {
6728 rtx comp;
6729 rtx op0_word, op1_word;
6730
6731 if (WORDS_BIG_ENDIAN)
6732 {
6733 op0_word = operand_subword_force (op0, i, mode);
6734 op1_word = operand_subword_force (op1, i, mode);
6735 }
6736 else
6737 {
6738 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6739 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6740 }
6741
6742 /* All but high-order word must be compared as unsigned. */
6743 comp = compare_from_rtx (op0_word, op1_word,
6744 (unsignedp || i > 0) ? GTU : GT,
906c4e36 6745 unsignedp, word_mode, NULL_RTX, 0);
bbf6f052
RK
6746 if (comp == const_true_rtx)
6747 emit_jump (if_true_label);
6748 else if (comp != const0_rtx)
906c4e36 6749 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052
RK
6750
6751 /* Consider lower words only if these are equal. */
6752 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
906c4e36 6753 NULL_RTX, 0);
bbf6f052
RK
6754 if (comp == const_true_rtx)
6755 emit_jump (if_false_label);
6756 else if (comp != const0_rtx)
906c4e36 6757 do_jump_for_compare (comp, NULL_RTX, if_false_label);
bbf6f052
RK
6758 }
6759
6760 if (if_false_label)
6761 emit_jump (if_false_label);
6762 if (drop_through_label)
6763 emit_label (drop_through_label);
6764}
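
/* Illustrative sketch (added commentary, not in the original source):
   the word-at-a-time loop above is the rtl analogue of this plain C
   comparison of two N-word values, most significant word first:

	int gt_by_parts (a, b, n)
	     unsigned int *a, *b;
	     int n;
	{
	  int i;
	  for (i = n - 1; i >= 0; i--)
	    {
	      if (a[i] > b[i])
	        return 1;
	      if (a[i] != b[i])
	        return 0;
	    }
	  return 0;
	}

   The sign of the whole value lives in the high-order word, which is
   why only the first comparison may need to be signed (GT); every lower
   word is compared unsigned (GTU), just as the sketch compares
   unsigned ints throughout.  */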

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      /* A word known unequal decides the whole comparison; a word known
         equal just means the lower words must be considered.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
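
/* Illustrative sketch (added commentary, not in the original source):
   the loop above implements multiword equality the way this plain C
   function does, bailing out to the "not equal" path on the first
   mismatching word:

	int eq_by_parts (a, b, n)
	     unsigned int *a, *b;
	     int n;
	{
	  int i;
	  for (i = 0; i < n; i++)
	    if (a[i] != b[i])
	      return 0;
	  return 1;
	}
*/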
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* A word known nonzero decides the whole test; a word known zero
         just means the remaining words must be considered.  */
      if (comp == const0_rtx)
        emit_jump (if_false_label);
      else if (comp != const_true_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
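
/* Illustrative sketch (added commentary, not in the original source):
   testing a multiword value against zero reduces to checking each word,
   as this plain C function does:

	int is_zero_by_parts (a, n)
	     unsigned int *a;
	     int n;
	{
	  int i;
	  for (i = 0; i < n; i++)
	    if (a[i] != 0)
	      return 0;
	  return 1;
	}

   The caller in do_jump exploits this by swapping the labels it passes:
   reaching the "false" path when the value is nonzero is exactly the
   not-equal-to-zero test it wants.  */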

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the insn before what was just emitted.
         On some machines, emitting the branch can discard
         the previous compare insn and emit a replacement.  */
      if (prev == 0)
        /* If there's only one preceding insn...  */
        insn = get_insns ();
      else
        insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
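
/* Illustrative sketch (added commentary, not in the original source):
   when only a false label is given, the transformation above is, in
   branch terms,

	beq L_false	becomes		bne L_false

   and when the branch cannot be inverted the fallback emits

	beq L_tmp
	jmp L_false
   L_tmp:

   which has the same effect at the cost of an extra jump.  */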
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
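
/* Illustrative example (added commentary, not in the original source):
   the signed/unsigned split matters because the same bit pattern orders
   differently under the two interpretations.  On a 32-bit machine,

	int i = -1;			unsigned int u = 0xffffffff;
	i < 1	is 1 (signed LT)	u < 1	is 0 (unsigned LTU)

   so compare () must pick LT or LTU from TREE_UNSIGNED of the operand
   type rather than from the surrounding expression.  */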

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  /* If one operand is constant, make it the second one.  */

  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return simplify_relational_operation (code, mode, op0, op1);

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
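
/* Illustrative example (added commentary, not in the original source):
   moving a constant first operand to the second position must also
   reverse the condition, which is what swap_condition does.  `5 < x'
   becomes `x > 5', so LT maps to GT (and GE to LE), while EQ and NE
   map to themselves.  */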
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                                    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
                            OPTAB_LIB_WIDEN);

      return op0;
    }
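
  /* Illustrative sketch (added commentary, not in the original source):
     the shift-and-mask sequence built above computes, in C terms,

	result = (x >> bitnum) & 1;		for (x & (1 << bitnum)) != 0
	result = ((x >> bitnum) & 1) ^ 1;	for (x & (1 << bitnum)) == 0

     using a logical right shift.  When the tested bit is the sign bit
     (bitnum == precision - 1) the masking step is skipped, because the
     logical shift already leaves only that bit.  */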

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
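
/* Illustrative sketch (added commentary, not in the original source):
   the set/jump/set fallback emitted above when emit_store_flag fails is
   the C idiom

	target = 1;
	if (op0 OP op1)
	  goto done;
	target = 0;
     done:

   with the two constants exchanged when INVERT is set.  */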
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the largest valid value of INDEX, i.e. the highest case
   value minus the lowest (one less than the number of table entries).
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
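
  /* Illustrative example (added commentary, not in the original source):
     this is the classic range-check idiom for a switch over case values
     LOW..HIGH:

	if ((unsigned) (i - LOW) > (unsigned) (HIGH - LOW))
	  goto default_label;

     an i below LOW wraps around to a huge unsigned value, so a single
     unsigned comparison rejects both ends of the range at once.  */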

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
              gen_rtx (MULT, Pmode, index,
                       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
              gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */