/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, 0, 0, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
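
/* Example (an illustrative sketch, not part of the original source):
   expanding something like `y = x++' uses the queue machinery above
   roughly as follows, where X, Y and INC stand for made-up rtx values.  */
#if 0
  /* Queue the increment of X; Q is a QUEUED rtx standing for X.  */
  rtx q = enqueue_insn (x, gen_move_insn (x, plus_constant (x, inc)));
  /* protect_from_queue yields X itself (or, once the queue has been
     emitted, a temp holding X's pre-increment value).  */
  emit_move_insn (y, protect_from_queue (q, 0));
  /* Flush the queued increment at the next sequence point.  */
  emit_queue ();
#endif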
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      if (from_mode == SFmode && to_mode == DFmode)
        libcall = extendsfdf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
        libcall = truncdfsf2_libfunc;
      else
        /* This conversion is not implemented yet.  There aren't any TFmode
           library calls.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, 0, lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                0, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, 0,
                              gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
           && ! MEM_VOLATILE_P (from)
           && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == REG
          || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension, where TO_MODE is wider than FROM_MODE.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  x = protect_from_queue (x, 0);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
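
/* Example (illustrative sketch, not part of the original source):
   sign-extending an SImode pseudo to DImode, and truncating it to
   QImode, using the routines above.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  rtx wide = convert_to_mode (DImode, reg, 0);    /* sign extension */
  rtx narrow = convert_to_mode (QImode, reg, 0);  /* truncation */
#endif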
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
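
/* For example, on a machine where MOVE_MAX is 4 and the operands are
   fully aligned, moving L == 7 bytes costs one SImode move, one HImode
   move and one QImode move, so this function returns 3.  Callers such
   as emit_block_move compare that count against MOVE_RATIO.  */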

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, -size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, -size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_rtx (CONST_INT, VOIDmode, size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
          < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
      if (HAVE_movstrqi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
        {
          rtx insn = gen_movstrqi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrhi
      if (HAVE_movstrhi
          && GET_CODE (size) == CONST_INT
          && ((unsigned) INTVAL (size)
              < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
        {
          rtx insn = gen_movstrhi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrsi
      if (HAVE_movstrsi)
        {
          rtx insn = gen_movstrsi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif
#ifdef HAVE_movstrdi
      if (HAVE_movstrdi)
        {
          rtx insn = gen_movstrdi (x, y, size,
                                   gen_rtx (CONST_INT, VOIDmode, align));
          if (insn)
            {
              emit_insn (insn);
              return;
            }
        }
#endif

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 1,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         size, Pmode);
#else
      emit_library_call (bcopy_libfunc, 1,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         size, Pmode);
#endif
    }
}
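
/* Example (illustrative sketch, not part of the original source):
   copying a 32-byte BLKmode object assuming word alignment, where
   X and Y stand for made-up BLKmode MEM rtx's.  */
#if 0
  emit_block_move (x, y, gen_rtx (CONST_INT, VOIDmode, 32),
                   UNITS_PER_WORD);
#endif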
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           gen_rtx (CONST_INT, VOIDmode, nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            gen_rtx (CONST_INT, VOIDmode, nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 1,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#else
      emit_library_call (bzero_libfunc, 1,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      return last_insn;
    }
  else
    abort ();
}
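
/* Example (illustrative sketch, not part of the original source):
   on a 32-bit target with no movdi pattern, a DImode register-to-register
   move falls into the multi-word case above and becomes two word moves.  */
#if 0
  rtx a = gen_reg_rtx (DImode), b = gen_reg_rtx (DImode);
  emit_move_insn (a, b);
#endif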
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab,
                             temp,
                             gen_rtx (CONST_INT, VOIDmode, extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = gen_rtx (CONST_INT, VOIDmode, INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     gen_rtx (CONST_INT, VOIDmode, used),
                                     0, 0, OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                       xinner, size,
                                       gen_rtx (CONST_INT, VOIDmode, align)));
              goto ret;
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 1,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 1,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, 0, 0, align, 0, 0, 0, args_addr,
                          gen_rtx (CONST_INT, VOIDmode,
                                   args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));
}
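
/* Example (illustrative sketch, not part of the original source):
   pushing a word-sized value with real push insns (ARGS_ADDR == 0),
   no partial registers and no padding; VAL stands for a made-up
   SImode rtx.  */
#if 0
  emit_push_insn (val, SImode, integer_type_node, 0,
                  GET_MODE_SIZE (SImode), 0, 0, 0, 0, const0_rtx);
#endif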
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.  */

void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_to_mode (DFmode, val, 0), mode = DFmode;
#endif

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, 0);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, (tree)0, 1))
        abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, (tree)0, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, (tree)0, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, 0,
                           argvec[count].reg && argvec[count].partial == 0,
                           0, &args_size, &argvec[count].offset,
                           &argvec[count].size);

      if (argvec[count].size.var)
        abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || 1
#endif
          )
        args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
         clobbering something we already put there (this library call might
         be inside the evaluation of an argument to a function whose call
         requires the stack).  This will only occur when the library call
         has sufficient args to run out of argument registers.  Abort in
         this case; if this ever occurs, code must be added to save and
         restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
        abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }
  va_end (p);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
                            REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (gen_rtx (CONST_INT, VOIDmode, args_size.constant),
                         0, 0);
#endif

#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
        emit_push_insn (val, mode, 0, 0, 0, partial, reg, 0, argblock,
                        gen_rtx (CONST_INT, VOIDmode,
                                 argvec[count].offset.constant));
      NO_DEFER_POP;
    }

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      NO_DEFER_POP;
    }

  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, 0, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : 0,
               old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}
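
/* The calls elsewhere in this file illustrate the varargs convention:
   FUN, NO_QUEUE, OUTMODE and NARGS, followed by NARGS (value, mode)
   pairs, e.g. (sketch; DST, SRC and SIZE stand for rtx operands):

	emit_library_call (memcpy_libfunc, 1, VOIDmode, 3,
			   dst, Pmode, src, Pmode, size, Pmode);  */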
1881\f
1882/* Expand an assignment that stores the value of FROM into TO.
1883 If WANT_VALUE is nonzero, return an rtx for the value of TO.
1884 (This may contain a QUEUED rtx.)
1885 Otherwise, the returned value is not meaningful.
1886
1887 SUGGEST_REG is no longer actually used.
1888   It used to mean: copy the value through a register
1889 and return that register, if that is possible.
1890 But now we do this if WANT_VALUE.
1891
1892 If the value stored is a constant, we return the constant. */
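
/* For illustration, a hypothetical assignment that takes the
   COMPONENT_REF path below: a bit-field has no simple MEM rtx
   and must go through store_field.  */
#if 0 /* illustrative example only */
struct s { int a : 3; int b : 5; } s;
int f (int x)
{
  return s.b = x;	/* expand_assignment on the COMPONENT_REF `s.b' */
}
#endif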
1893
1894rtx
1895expand_assignment (to, from, want_value, suggest_reg)
1896 tree to, from;
1897 int want_value;
1898 int suggest_reg;
1899{
1900 register rtx to_rtx = 0;
1901 rtx result;
1902
1903 /* Don't crash if the lhs of the assignment was erroneous. */
1904
1905 if (TREE_CODE (to) == ERROR_MARK)
1906 return expand_expr (from, 0, VOIDmode, 0);
1907
1908 /* Assignment of a structure component needs special treatment
1909 if the structure component's rtx is not simply a MEM.
1910 Assignment of an array element at a constant index
1911 has the same problem. */
1912
1913 if (TREE_CODE (to) == COMPONENT_REF
1914 || TREE_CODE (to) == BIT_FIELD_REF
1915 || (TREE_CODE (to) == ARRAY_REF
1916 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
1917 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
1918 {
1919 enum machine_mode mode1;
1920 int bitsize;
1921 int bitpos;
1922 int unsignedp;
1923 int volatilep = 0;
1924 tree tem = get_inner_reference (to, &bitsize, &bitpos,
1925 &mode1, &unsignedp, &volatilep);
1926
1927 /* If we are going to use store_bit_field and extract_bit_field,
1928 make sure to_rtx will be safe for multiple use. */
1929
1930 if (mode1 == VOIDmode && want_value)
1931 tem = stabilize_reference (tem);
1932
1933 to_rtx = expand_expr (tem, 0, VOIDmode, 0);
1934 if (volatilep)
1935 {
1936 if (GET_CODE (to_rtx) == MEM)
1937 MEM_VOLATILE_P (to_rtx) = 1;
1938#if 0 /* This was turned off because, when a field is volatile
1939 in an object which is not volatile, the object may be in a register,
1940 and then we would abort over here. */
1941 else
1942 abort ();
1943#endif
1944 }
1945
1946 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
1947 (want_value
1948 /* Spurious cast makes HPUX compiler happy. */
1949 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
1950 : VOIDmode),
1951 unsignedp,
1952 /* Required alignment of containing datum. */
1953 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
1954 int_size_in_bytes (TREE_TYPE (tem)));
1955 preserve_temp_slots (result);
1956 free_temp_slots ();
1957
1958 return result;
1959 }
1960
1961 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
1962 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
1963
1964 if (to_rtx == 0)
1965 to_rtx = expand_expr (to, 0, VOIDmode, 0);
1966
1967 /* In case we are returning the contents of an object which overlaps
1968 the place the value is being stored, use a safe function when copying
1969 a value through a pointer into a structure value return block. */
1970 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
1971 && current_function_returns_struct
1972 && !current_function_returns_pcc_struct)
1973 {
1974 rtx from_rtx = expand_expr (from, 0, VOIDmode, 0);
1975 rtx size = expr_size (from);
1976
1977#ifdef TARGET_MEM_FUNCTIONS
1978       emit_library_call (memcpy_libfunc, 1,
1979			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
1980			 XEXP (from_rtx, 0), Pmode,
1981			 size, Pmode);
1982#else
1983       emit_library_call (bcopy_libfunc, 1,
1984			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
1985 XEXP (to_rtx, 0), Pmode,
1986 size, Pmode);
1987#endif
1988
1989 preserve_temp_slots (to_rtx);
1990 free_temp_slots ();
1991 return to_rtx;
1992 }
1993
1994 /* Compute FROM and store the value in the rtx we got. */
1995
1996 result = store_expr (from, to_rtx, want_value);
1997 preserve_temp_slots (result);
1998 free_temp_slots ();
1999 return result;
2000}
2001
2002/* Generate code for computing expression EXP,
2003 and storing the value into TARGET.
2004 Returns TARGET or an equivalent value.
2005 TARGET may contain a QUEUED rtx.
2006
2007 If SUGGEST_REG is nonzero, copy the value through a register
2008 and return that register, if that is possible.
2009
2010 If the value stored is a constant, we return the constant. */
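
/* For illustration, two hypothetical fragments that exercise the special
   cases below: a COMPOUND_EXPR stores only its second half, and a
   BLKmode COND_EXPR stores each arm directly into TARGET rather than
   building a temporary.  */
#if 0 /* illustrative example only */
struct big { int v[8]; } a, b, t;
void g (int c, int x, int y, int *p)
{
  *p = (x++, y);	/* COMPOUND_EXPR case */
  t = c ? a : b;	/* COND_EXPR with GET_MODE (target) == BLKmode */
}
#endif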
2011
2012rtx
2013store_expr (exp, target, suggest_reg)
2014 register tree exp;
2015 register rtx target;
2016 int suggest_reg;
2017{
2018 register rtx temp;
2019 int dont_return_target = 0;
2020
2021 if (TREE_CODE (exp) == COMPOUND_EXPR)
2022 {
2023 /* Perform first part of compound expression, then assign from second
2024 part. */
2025 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2026 emit_queue ();
2027 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2028 }
2029 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2030 {
2031 /* For conditional expression, get safe form of the target. Then
2032 test the condition, doing the appropriate assignment on either
2033 side. This avoids the creation of unnecessary temporaries.
2034 For non-BLKmode, it is more efficient not to do this. */
2035
2036 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2037
2038 emit_queue ();
2039 target = protect_from_queue (target, 1);
2040
2041 NO_DEFER_POP;
2042 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2043 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2044 emit_queue ();
2045 emit_jump_insn (gen_jump (lab2));
2046 emit_barrier ();
2047 emit_label (lab1);
2048 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2049 emit_queue ();
2050 emit_label (lab2);
2051 OK_DEFER_POP;
2052 return target;
2053 }
2054 else if (suggest_reg && GET_CODE (target) == MEM
2055 && GET_MODE (target) != BLKmode)
2056 /* If target is in memory and caller wants value in a register instead,
2057 arrange that. Pass TARGET as target for expand_expr so that,
2058 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2059 We know expand_expr will not use the target in that case. */
2060 {
2061 temp = expand_expr (exp, cse_not_expected ? 0 : target,
2062 GET_MODE (target), 0);
2063 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2064 temp = copy_to_reg (temp);
2065 dont_return_target = 1;
2066 }
2067 else if (queued_subexp_p (target))
2068 /* If target contains a postincrement, it is not safe
2069 to use as the returned value. It would access the wrong
2070 place by the time the queued increment gets output.
2071 So copy the value through a temporary and use that temp
2072 as the result. */
2073 {
2074 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2075 {
2076 /* Expand EXP into a new pseudo. */
2077 temp = gen_reg_rtx (GET_MODE (target));
2078 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2079 }
2080 else
2081 temp = expand_expr (exp, 0, GET_MODE (target), 0);
2082 dont_return_target = 1;
2083 }
2084 else
2085 {
2086 temp = expand_expr (exp, target, GET_MODE (target), 0);
2087 /* DO return TARGET if it's a specified hardware register.
2088 expand_return relies on this. */
2089 if (!(target && GET_CODE (target) == REG
2090 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2091 && CONSTANT_P (temp))
2092 dont_return_target = 1;
2093 }
2094
2095 /* If value was not generated in the target, store it there.
2096      Convert the value to TARGET's type first if necessary.  */
2097
2098 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2099 {
2100 target = protect_from_queue (target, 1);
2101 if (GET_MODE (temp) != GET_MODE (target)
2102 && GET_MODE (temp) != VOIDmode)
2103 {
2104 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2105 if (dont_return_target)
2106 {
2107 /* In this case, we will return TEMP,
2108 so make sure it has the proper mode.
2109 But don't forget to store the value into TARGET. */
2110 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2111 emit_move_insn (target, temp);
2112 }
2113 else
2114 convert_move (target, temp, unsignedp);
2115 }
2116
2117 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2118 {
2119 /* Handle copying a string constant into an array.
2120 The string constant may be shorter than the array.
2121 So copy just the string's actual length, and clear the rest. */
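
	  /* For illustration, a hypothetical initialization handled here:
	     only the four bytes of "abc" (with its terminating null) are
	     copied, and the remaining six bytes of BUF are cleared by the
	     code below.  */
#if 0 /* illustrative example only */
	  char buf[10] = "abc";
#endif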
2122 rtx size;
2123
2124 /* Get the size of the data type of the string,
2125 which is actually the size of the target. */
2126 size = expr_size (exp);
2127 if (GET_CODE (size) == CONST_INT
2128 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2129 emit_block_move (target, temp, size,
2130 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2131 else
2132	    {
2133 /* Compute the size of the data to copy from the string. */
2134 tree copy_size
2135 = fold (build (MIN_EXPR, sizetype,
2136 size_binop (CEIL_DIV_EXPR,
2137 TYPE_SIZE (TREE_TYPE (exp)),
2138 size_int (BITS_PER_UNIT)),
2139 convert (sizetype,
2140 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2141 rtx copy_size_rtx = expand_expr (copy_size, 0, VOIDmode, 0);
2142 rtx label = 0;
2143
2144 /* Copy that much. */
2145 emit_block_move (target, temp, copy_size_rtx,
2146 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2147
2148 /* Figure out how much is left in TARGET
2149 that we have to clear. */
2150 if (GET_CODE (copy_size_rtx) == CONST_INT)
2151 {
2152 temp = plus_constant (XEXP (target, 0),
2153 TREE_STRING_LENGTH (exp));
2154 size = plus_constant (size,
2155 - TREE_STRING_LENGTH (exp));
2156 }
2157 else
2158 {
2159 enum machine_mode size_mode = Pmode;
2160
2161 temp = force_reg (Pmode, XEXP (target, 0));
2162 temp = expand_binop (size_mode, add_optab, temp,
2163 copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);
2164
2165 size = expand_binop (size_mode, sub_optab, size,
2166 copy_size_rtx, 0, 0, OPTAB_LIB_WIDEN);
2167
2168 emit_cmp_insn (size, const0_rtx, LT, 0,
2169 GET_MODE (size), 0, 0);
2170 label = gen_label_rtx ();
2171 emit_jump_insn (gen_blt (label));
2172 }
2173
2174 if (size != const0_rtx)
2175 {
2176#ifdef TARGET_MEM_FUNCTIONS
2177		  emit_library_call (memset_libfunc, 1, VOIDmode, 3,
2178				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
2179#else
2180		  emit_library_call (bzero_libfunc, 1, VOIDmode, 2,
2181				     temp, Pmode, size, Pmode);
2182#endif
2183 }
2184 if (label)
2185 emit_label (label);
2186 }
2187 }
2188 else if (GET_MODE (temp) == BLKmode)
2189 emit_block_move (target, temp, expr_size (exp),
2190 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2191 else
2192 emit_move_insn (target, temp);
2193 }
2194 if (dont_return_target)
2195 return temp;
2196 return target;
2197}
2198\f
2199/* Store the value of constructor EXP into the rtx TARGET.
2200 TARGET is either a REG or a MEM. */
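
/* For illustration, hypothetical aggregates handled here.  A constructor
   with fewer elements than its type first clears the whole object, then
   stores each given element with store_field.  */
#if 0 /* illustrative example only */
struct pt { int x, y, z; };
struct pt p = { 1, 2 };	/* 2 inits < 3 fields: clear_storage first */
int v[4] = { 7 };	/* likewise for the ARRAY_TYPE case below */
#endif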
2201
2202static void
2203store_constructor (exp, target)
2204 tree exp;
2205 rtx target;
2206{
2207 tree type = TREE_TYPE (exp);
2208
2209 /* We know our target cannot conflict, since safe_from_p has been called. */
2210#if 0
2211 /* Don't try copying piece by piece into a hard register
2212 since that is vulnerable to being clobbered by EXP.
2213 Instead, construct in a pseudo register and then copy it all. */
2214 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2215 {
2216 rtx temp = gen_reg_rtx (GET_MODE (target));
2217 store_constructor (exp, temp);
2218 emit_move_insn (target, temp);
2219 return;
2220 }
2221#endif
2222
2223   if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2224 {
2225 register tree elt;
2226
2227 /* Inform later passes that the whole union value is dead. */
2228 if (TREE_CODE (type) == UNION_TYPE)
2229	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2230
2231 /* If we are building a static constructor into a register,
2232 set the initial value as zero so we can fold the value into
2233 a constant. */
2234 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2235 emit_move_insn (target, const0_rtx);
2236
2237 /* If the constructor has fewer fields than the structure,
2238 clear the whole structure first. */
2239 else if (list_length (CONSTRUCTOR_ELTS (exp))
2240	       != list_length (TYPE_FIELDS (type)))
2241	clear_storage (target, int_size_in_bytes (type));
2242 else
2243 /* Inform later passes that the old value is dead. */
2244 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2245
2246 /* Store each element of the constructor into
2247 the corresponding field of TARGET. */
2248
2249 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2250 {
2251 register tree field = TREE_PURPOSE (elt);
2252 register enum machine_mode mode;
2253 int bitsize;
2254 int bitpos;
2255 int unsignedp;
2256
2257 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2258 unsignedp = TREE_UNSIGNED (field);
2259 mode = DECL_MODE (field);
2260 if (DECL_BIT_FIELD (field))
2261 mode = VOIDmode;
2262
2263 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2264 /* ??? This case remains to be written. */
2265 abort ();
2266
2267 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2268
2269 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2270 /* The alignment of TARGET is
2271 at least what its type requires. */
2272 VOIDmode, 0,
2273 TYPE_ALIGN (type) / BITS_PER_UNIT,
2274 int_size_in_bytes (type));
2275 }
2276 }
2277   else if (TREE_CODE (type) == ARRAY_TYPE)
2278 {
2279 register tree elt;
2280 register int i;
2281       tree domain = TYPE_DOMAIN (type);
2282 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2283 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2284       tree elttype = TREE_TYPE (type);
2285
2286       /* If the constructor has fewer fields than the structure,
2287	  clear the whole structure first.  Similarly if this is a
2288	  static constructor of a non-BLKmode object.  */
2289
2290       if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2291	  || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2292 clear_storage (target, maxelt - minelt + 1);
2293 else
2294 /* Inform later passes that the old value is dead. */
2295 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2296
2297 /* Store each element of the constructor into
2298 the corresponding element of TARGET, determined
2299 by counting the elements. */
2300 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2301 elt;
2302 elt = TREE_CHAIN (elt), i++)
2303 {
2304 register enum machine_mode mode;
2305 int bitsize;
2306 int bitpos;
2307 int unsignedp;
2308
2309 mode = TYPE_MODE (elttype);
2310 bitsize = GET_MODE_BITSIZE (mode);
2311 unsignedp = TREE_UNSIGNED (elttype);
2312
2313 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2314
2315 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2316 /* The alignment of TARGET is
2317 at least what its type requires. */
2318 VOIDmode, 0,
2319		       TYPE_ALIGN (type) / BITS_PER_UNIT,
2320		       int_size_in_bytes (type));
2321 }
2322 }
2323
2324 else
2325 abort ();
2326}
2327
2328/* Store the value of EXP (an expression tree)
2329 into a subfield of TARGET which has mode MODE and occupies
2330 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2331 If MODE is VOIDmode, it means that we are storing into a bit-field.
2332
2333 If VALUE_MODE is VOIDmode, return nothing in particular.
2334 UNSIGNEDP is not used in this case.
2335
2336 Otherwise, return an rtx for the value stored. This rtx
2337 has mode VALUE_MODE if that is convenient to do.
2338 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2339
2340 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2341 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
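
/* For illustration: storing into F in the hypothetical struct below
   would use BITSIZE = 5 and BITPOS = 3 (on a typical layout where bit
   0 is allocated first), with MODE == VOIDmode because F is a
   bit-field.  */
#if 0 /* illustrative example only */
struct q { unsigned a : 3; unsigned f : 5; };
#endif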
2342
2343static rtx
2344store_field (target, bitsize, bitpos, mode, exp, value_mode,
2345 unsignedp, align, total_size)
2346 rtx target;
2347 int bitsize, bitpos;
2348 enum machine_mode mode;
2349 tree exp;
2350 enum machine_mode value_mode;
2351 int unsignedp;
2352 int align;
2353 int total_size;
2354{
2355 int width_mask = 0;
2356
2357 if (bitsize < HOST_BITS_PER_INT)
2358 width_mask = (1 << bitsize) - 1;
2359
2360 /* If we are storing into an unaligned field of an aligned union that is
2361 in a register, we may have the mode of TARGET being an integer mode but
2362 MODE == BLKmode. In that case, get an aligned object whose size and
2363 alignment are the same as TARGET and store TARGET into it (we can avoid
2364 the store if the field being stored is the entire width of TARGET). Then
2365 call ourselves recursively to store the field into a BLKmode version of
2366 that object. Finally, load from the object into TARGET. This is not
2367 very efficient in general, but should only be slightly more expensive
2368 than the otherwise-required unaligned accesses. Perhaps this can be
2369 cleaned up later. */
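
  /* For illustration, a hypothetical case that reaches the path below:
     an aligned union held in a register whose member is a BLKmode
     struct.  */
#if 0 /* illustrative example only */
  union u { int word; struct { char c[3]; } odd; } x;	/* x.odd is BLKmode */
#endif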
2370
2371 if (mode == BLKmode
2372 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2373 {
2374 rtx object = assign_stack_temp (GET_MODE (target),
2375 GET_MODE_SIZE (GET_MODE (target)), 0);
2376 rtx blk_object = copy_rtx (object);
2377
2378 PUT_MODE (blk_object, BLKmode);
2379
2380 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2381 emit_move_insn (object, target);
2382
2383 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2384 align, total_size);
2385
2386 emit_move_insn (target, object);
2387
2388 return target;
2389 }
2390
2391 /* If the structure is in a register or if the component
2392 is a bit field, we cannot use addressing to access it.
2393 Use bit-field techniques or SUBREG to store in it. */
2394
2395 if (mode == VOIDmode || GET_CODE (target) == REG
2396 || GET_CODE (target) == SUBREG)
2397 {
2398 rtx temp = expand_expr (exp, 0, VOIDmode, 0);
2399 /* Store the value in the bitfield. */
2400 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2401 if (value_mode != VOIDmode)
2402 {
2403 /* The caller wants an rtx for the value. */
2404 /* If possible, avoid refetching from the bitfield itself. */
2405 if (width_mask != 0
2406 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2407 return expand_and (temp,
2408 gen_rtx (CONST_INT, VOIDmode, width_mask), 0);
2409 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2410 0, value_mode, 0, align, total_size);
2411 }
2412 return const0_rtx;
2413 }
2414 else
2415 {
2416 rtx addr = XEXP (target, 0);
2417 rtx to_rtx;
2418
2419 /* If a value is wanted, it must be the lhs;
2420 so make the address stable for multiple use. */
2421
2422 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2423 && ! CONSTANT_ADDRESS_P (addr)
2424 /* A frame-pointer reference is already stable. */
2425 && ! (GET_CODE (addr) == PLUS
2426 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2427 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2428 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2429 addr = copy_to_reg (addr);
2430
2431 /* Now build a reference to just the desired component. */
2432
2433 to_rtx = change_address (target, mode,
2434 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2435 MEM_IN_STRUCT_P (to_rtx) = 1;
2436
2437 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2438 }
2439}
2440\f
2441/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2442 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2443 ARRAY_REFs at constant positions and find the ultimate containing object,
2444 which we return.
2445
2446 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2447 bit position, and *PUNSIGNEDP to the signedness of the field.
2448
2449 If any of the extraction expressions is volatile,
2450 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2451
2452 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2453 is a mode that can be used to access the field. In that case, *PBITSIZE
2454 is redundant.
2455
2456 If the field describes a variable-sized object, *PMODE is set to
2457 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2458 this case, but the address of the object can be found. */
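
/* For illustration (assuming a 32-bit int): for a hypothetical
   reference `s.a.b[2]' to the objects below, this returns the VAR_DECL
   for `s' with *PBITPOS = 32 (offset of A) + 64 (offset of B[2] within
   A) = 96 and *PBITSIZE = 32.  */
#if 0 /* illustrative example only */
struct inner { int b[4]; };
struct outer { int pad; struct inner a; } s;
#endif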
2459
2460tree
2461get_inner_reference (exp, pbitsize, pbitpos, pmode, punsignedp, pvolatilep)
2462 tree exp;
2463 int *pbitsize;
2464 int *pbitpos;
2465 enum machine_mode *pmode;
2466 int *punsignedp;
2467 int *pvolatilep;
2468{
2469 tree size_tree = 0;
2470 enum machine_mode mode = VOIDmode;
2471
2472 if (TREE_CODE (exp) == COMPONENT_REF)
2473 {
2474 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2475 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2476 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2477 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2478 }
2479 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2480 {
2481 size_tree = TREE_OPERAND (exp, 1);
2482 *punsignedp = TREE_UNSIGNED (exp);
2483 }
2484 else
2485 {
2486 mode = TYPE_MODE (TREE_TYPE (exp));
2487 *pbitsize = GET_MODE_BITSIZE (mode);
2488 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2489 }
2490
2491 if (size_tree)
2492 {
2493 if (TREE_CODE (size_tree) != INTEGER_CST)
2494 mode = BLKmode, *pbitsize = -1;
2495 else
2496 *pbitsize = TREE_INT_CST_LOW (size_tree);
2497 }
2498
2499 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2500 and find the ultimate containing object. */
2501
2502 *pbitpos = 0;
2503
2504 while (1)
2505 {
2506 if (TREE_CODE (exp) == COMPONENT_REF)
2507 {
2508 tree field = TREE_OPERAND (exp, 1);
2509
2510 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2511 /* ??? This case remains to be written. */
2512 abort ();
2513
2514 *pbitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2515 if (TREE_THIS_VOLATILE (exp))
2516 *pvolatilep = 1;
2517 }
2518 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2519 {
2520 if (TREE_CODE (TREE_OPERAND (exp, 2)) != INTEGER_CST)
2521 /* ??? This case remains to be written. */
2522 abort ();
2523
2524 *pbitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
2525 if (TREE_THIS_VOLATILE (exp))
2526 *pvolatilep = 1;
2527 }
2528 else if (TREE_CODE (exp) == ARRAY_REF
2529 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2530 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2531 {
2532 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2533 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2534 if (TREE_THIS_VOLATILE (exp))
2535 *pvolatilep = 1;
2536 }
2537 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2538 && ! ((TREE_CODE (exp) == NOP_EXPR
2539 || TREE_CODE (exp) == CONVERT_EXPR)
2540 && (TYPE_MODE (TREE_TYPE (exp))
2541 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2542 break;
2543 exp = TREE_OPERAND (exp, 0);
2544 }
2545
2546 /* If this was a bit-field, see if there is a mode that allows direct
2547 access in case EXP is in memory. */
2548 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2549 {
2550 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2551 if (mode == BLKmode)
2552 mode = VOIDmode;
2553 }
2554
2555 *pmode = mode;
2556
2557 return exp;
2558}
2559\f
2560/* Given an rtx VALUE that may contain additions and multiplications,
2561 return an equivalent value that just refers to a register or memory.
2562 This is done by generating instructions to perform the arithmetic
2563 and returning a pseudo-register containing the value. */
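
/* For illustration, a hypothetical call; SOME_REG stands for any
   pseudo register.  A bare REG or MEM comes back unchanged.  */
#if 0 /* illustrative example only */
rtx sum = gen_rtx (PLUS, SImode, some_reg,
		   gen_rtx (CONST_INT, VOIDmode, 4));
rtx op = force_operand (sum, 0);	/* emits an add; OP is a pseudo */
#endif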
2564
2565rtx
2566force_operand (value, target)
2567 rtx value, target;
2568{
2569 register optab binoptab = 0;
2570 /* Use a temporary to force order of execution of calls to
2571 `force_operand'. */
2572 rtx tmp;
2573 register rtx op2;
2574 /* Use subtarget as the target for operand 0 of a binary operation. */
2575 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2576
2577 if (GET_CODE (value) == PLUS)
2578 binoptab = add_optab;
2579 else if (GET_CODE (value) == MINUS)
2580 binoptab = sub_optab;
2581 else if (GET_CODE (value) == MULT)
2582 {
2583 op2 = XEXP (value, 1);
2584 if (!CONSTANT_P (op2)
2585 && !(GET_CODE (op2) == REG && op2 != subtarget))
2586 subtarget = 0;
2587 tmp = force_operand (XEXP (value, 0), subtarget);
2588 return expand_mult (GET_MODE (value), tmp,
2589 force_operand (op2, 0),
2590 target, 0);
2591 }
2592
2593 if (binoptab)
2594 {
2595 op2 = XEXP (value, 1);
2596 if (!CONSTANT_P (op2)
2597 && !(GET_CODE (op2) == REG && op2 != subtarget))
2598 subtarget = 0;
2599 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2600 {
2601 binoptab = add_optab;
2602 op2 = negate_rtx (GET_MODE (value), op2);
2603 }
2604
2605 /* Check for an addition with OP2 a constant integer and our first
2606 operand a PLUS of a virtual register and something else. In that
2607 case, we want to emit the sum of the virtual register and the
2608 constant first and then add the other value. This allows virtual
2609 register instantiation to simply modify the constant rather than
2610 creating another one around this addition. */
2611 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2612 && GET_CODE (XEXP (value, 0)) == PLUS
2613 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2614 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2615 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2616 {
2617 rtx temp = expand_binop (GET_MODE (value), binoptab,
2618 XEXP (XEXP (value, 0), 0), op2,
2619 subtarget, 0, OPTAB_LIB_WIDEN);
2620 return expand_binop (GET_MODE (value), binoptab, temp,
2621 force_operand (XEXP (XEXP (value, 0), 1), 0),
2622 target, 0, OPTAB_LIB_WIDEN);
2623 }
2624
2625 tmp = force_operand (XEXP (value, 0), subtarget);
2626 return expand_binop (GET_MODE (value), binoptab, tmp,
2627 force_operand (op2, 0),
2628 target, 0, OPTAB_LIB_WIDEN);
2629  /* We give UNSIGNEDP = 0 to expand_binop
2630     because the only operations we are expanding here are signed ones.  */
2631 }
2632 return value;
2633}
2634\f
2635/* Subroutine of expand_expr:
2636 save the non-copied parts (LIST) of an expr (LHS), and return a list
2637 which can restore these values to their previous values,
2638 should something modify their storage. */
2639
2640static tree
2641save_noncopied_parts (lhs, list)
2642 tree lhs;
2643 tree list;
2644{
2645 tree tail;
2646 tree parts = 0;
2647
2648 for (tail = list; tail; tail = TREE_CHAIN (tail))
2649 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2650 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2651 else
2652 {
2653 tree part = TREE_VALUE (tail);
2654 tree part_type = TREE_TYPE (part);
2655 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part, 0);
2656 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2657 int_size_in_bytes (part_type), 0);
2658 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2659 target = change_address (target, TYPE_MODE (part_type), 0);
2660 parts = tree_cons (to_be_saved,
2661 build (RTL_EXPR, part_type, 0, (tree) target),
2662 parts);
2663 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
2664 }
2665 return parts;
2666}
2667
2668/* Subroutine of expand_expr:
2669 record the non-copied parts (LIST) of an expr (LHS), and return a list
2670 which specifies the initial values of these parts. */
2671
2672static tree
2673init_noncopied_parts (lhs, list)
2674 tree lhs;
2675 tree list;
2676{
2677 tree tail;
2678 tree parts = 0;
2679
2680 for (tail = list; tail; tail = TREE_CHAIN (tail))
2681 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2682 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
2683 else
2684 {
2685 tree part = TREE_VALUE (tail);
2686 tree part_type = TREE_TYPE (part);
2687 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part, 0);
2688 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
2689 }
2690 return parts;
2691}
2692
2693/* Subroutine of expand_expr: return nonzero iff there is no way that
2694 EXP can reference X, which is being modified. */
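
/* For illustration, two hypothetical answers: safe_from_p returns 1
   when EXP is a constant (the 'c' class below), and 0 when EXP is an
   INDIRECT_REF and X is a MEM, since the pointer might alias X.  */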
2695
2696static int
2697safe_from_p (x, exp)
2698 rtx x;
2699 tree exp;
2700{
2701 rtx exp_rtl = 0;
2702 int i, nops;
2703
2704 if (x == 0)
2705 return 1;
2706
2707  /* If this is a subreg of a hard register, declare it unsafe; otherwise,
2708     find the underlying pseudo.  */
2709 if (GET_CODE (x) == SUBREG)
2710 {
2711 x = SUBREG_REG (x);
2712 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2713 return 0;
2714 }
2715
2716 /* If X is a location in the outgoing argument area, it is always safe. */
2717 if (GET_CODE (x) == MEM
2718 && (XEXP (x, 0) == virtual_outgoing_args_rtx
2719 || (GET_CODE (XEXP (x, 0)) == PLUS
2720 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
2721 return 1;
2722
2723 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2724 {
2725 case 'd':
2726 exp_rtl = DECL_RTL (exp);
2727 break;
2728
2729 case 'c':
2730 return 1;
2731
2732 case 'x':
2733 if (TREE_CODE (exp) == TREE_LIST)
2734 return (safe_from_p (x, TREE_VALUE (exp))
2735 && (TREE_CHAIN (exp) == 0
2736 || safe_from_p (x, TREE_CHAIN (exp))));
2737 else
2738 return 0;
2739
2740 case '1':
2741 return safe_from_p (x, TREE_OPERAND (exp, 0));
2742
2743 case '2':
2744 case '<':
2745 return (safe_from_p (x, TREE_OPERAND (exp, 0))
2746 && safe_from_p (x, TREE_OPERAND (exp, 1)));
2747
2748 case 'e':
2749 case 'r':
2750 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
2751 the expression. If it is set, we conflict iff we are that rtx or
2752 both are in memory. Otherwise, we check all operands of the
2753 expression recursively. */
2754
2755 switch (TREE_CODE (exp))
2756 {
2757 case ADDR_EXPR:
2758 return staticp (TREE_OPERAND (exp, 0));
2759
2760 case INDIRECT_REF:
2761 if (GET_CODE (x) == MEM)
2762 return 0;
2763 break;
2764
2765 case CALL_EXPR:
2766 exp_rtl = CALL_EXPR_RTL (exp);
2767 if (exp_rtl == 0)
2768 {
2769 /* Assume that the call will clobber all hard registers and
2770 all of memory. */
2771 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2772 || GET_CODE (x) == MEM)
2773 return 0;
2774 }
2775
2776 break;
2777
2778 case RTL_EXPR:
2779 exp_rtl = RTL_EXPR_RTL (exp);
2780 if (exp_rtl == 0)
2781 /* We don't know what this can modify. */
2782 return 0;
2783
2784 break;
2785
2786 case WITH_CLEANUP_EXPR:
2787 exp_rtl = RTL_EXPR_RTL (exp);
2788 break;
2789
2790 case SAVE_EXPR:
2791 exp_rtl = SAVE_EXPR_RTL (exp);
2792 break;
2793
2794 case BIND_EXPR:
2795 /* The only operand we look at is operand 1. The rest aren't
2796 part of the expression. */
2797 return safe_from_p (x, TREE_OPERAND (exp, 1));
2798
2799 case METHOD_CALL_EXPR:
2800 /* This takes a rtx argument, but shouldn't appear here. */
2801 abort ();
2802 }
2803
2804 /* If we have an rtx, we do not need to scan our operands. */
2805 if (exp_rtl)
2806 break;
2807
2808 nops = tree_code_length[(int) TREE_CODE (exp)];
2809 for (i = 0; i < nops; i++)
2810 if (TREE_OPERAND (exp, i) != 0
2811 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
2812 return 0;
2813 }
2814
2815 /* If we have an rtl, find any enclosed object. Then see if we conflict
2816 with it. */
2817 if (exp_rtl)
2818 {
2819 if (GET_CODE (exp_rtl) == SUBREG)
2820 {
2821 exp_rtl = SUBREG_REG (exp_rtl);
2822 if (GET_CODE (exp_rtl) == REG
2823 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
2824 return 0;
2825 }
2826
2827      /* If the rtl is X, then it is not safe.  Otherwise, it is safe unless
2828	 both are memory and EXP is not readonly.  */
2829 return ! (rtx_equal_p (x, exp_rtl)
2830 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
2831 && ! TREE_READONLY (exp)));
2832 }
2833
2834 /* If we reach here, it is safe. */
2835 return 1;
2836}
2837
2838/* Subroutine of expand_expr: return nonzero iff EXP is an
2839 expression whose type is statically determinable. */
2840
2841static int
2842fixed_type_p (exp)
2843 tree exp;
2844{
2845 if (TREE_CODE (exp) == PARM_DECL
2846 || TREE_CODE (exp) == VAR_DECL
2847 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
2848 || TREE_CODE (exp) == COMPONENT_REF
2849 || TREE_CODE (exp) == ARRAY_REF)
2850 return 1;
2851 return 0;
2852}
2853\f
2854/* expand_expr: generate code for computing expression EXP.
2855 An rtx for the computed value is returned. The value is never null.
2856 In the case of a void EXP, const0_rtx is returned.
2857
2858 The value may be stored in TARGET if TARGET is nonzero.
2859 TARGET is just a suggestion; callers must assume that
2860 the rtx returned may not be the same as TARGET.
2861
2862 If TARGET is CONST0_RTX, it means that the value will be ignored.
2863
2864 If TMODE is not VOIDmode, it suggests generating the
2865 result in mode TMODE. But this is done only when convenient.
2866   Otherwise, TMODE is ignored and the value is generated in its natural mode.
2867 TMODE is just a suggestion; callers must assume that
2868 the rtx returned may not have mode TMODE.
2869
2870 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
2871 with a constant address even if that address is not normally legitimate.
2872 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
2873
2874 If MODIFIER is EXPAND_SUM then when EXP is an addition
2875 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
2876 or a nest of (PLUS ...) and (MINUS ...) where the terms are
2877 products as above, or REG or MEM, or constant.
2878 Ordinarily in such cases we would output mul or add instructions
2879 and then return a pseudo reg containing the sum.
2880
2881 EXPAND_INITIALIZER is much like EXPAND_SUM except that
2882 it also marks a label as absolutely required (it can't be dead).
2883   This is used for outputting expressions used in initializers.  */
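
/* For illustration (assuming a 4-byte int), a hypothetical use of
   EXPAND_SUM: expanding the address in the function below may yield an
   unreduced sum such as
   (plus (mult (reg) (const_int 4)) (symbol_ref ("arr")))
   rather than forcing the result into a pseudo register.  */
#if 0 /* illustrative example only */
int arr[16];
int *f (int i) { return &arr[i]; }
#endif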
2884
2885rtx
2886expand_expr (exp, target, tmode, modifier)
2887 register tree exp;
2888 rtx target;
2889 enum machine_mode tmode;
2890 enum expand_modifier modifier;
2891{
2892 register rtx op0, op1, temp;
2893 tree type = TREE_TYPE (exp);
2894 int unsignedp = TREE_UNSIGNED (type);
2895 register enum machine_mode mode = TYPE_MODE (type);
2896 register enum tree_code code = TREE_CODE (exp);
2897 optab this_optab;
2898 /* Use subtarget as the target for operand 0 of a binary operation. */
2899 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2900 rtx original_target = target;
2901 int ignore = target == const0_rtx;
2902 tree context;
2903
2904 /* Don't use hard regs as subtargets, because the combiner
2905 can only handle pseudo regs. */
2906 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
2907 subtarget = 0;
2908 /* Avoid subtargets inside loops,
2909 since they hide some invariant expressions. */
2910 if (preserve_subexpressions_p ())
2911 subtarget = 0;
2912
2913 if (ignore) target = 0, original_target = 0;
2914
2915 /* If will do cse, generate all results into pseudo registers
2916 since 1) that allows cse to find more things
2917 and 2) otherwise cse could produce an insn the machine
2918 cannot support. */
2919
2920 if (! cse_not_expected && mode != BLKmode && target
2921 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
2922 target = subtarget;
2923
2924 /* Ensure we reference a volatile object even if value is ignored. */
2925 if (ignore && TREE_THIS_VOLATILE (exp)
2926 && mode != VOIDmode && mode != BLKmode)
2927 {
2928 target = gen_reg_rtx (mode);
2929 temp = expand_expr (exp, target, VOIDmode, modifier);
2930 if (temp != target)
2931 emit_move_insn (target, temp);
2932 return target;
2933 }
2934
2935 switch (code)
2936 {
2937 case LABEL_DECL:
2938 {
2939 tree function = decl_function_context (exp);
2940 /* Handle using a label in a containing function. */
2941 if (function != current_function_decl && function != 0)
2942 {
2943 struct function *p = find_function_data (function);
2944 /* Allocate in the memory associated with the function
2945 that the label is in. */
2946 push_obstacks (p->function_obstack,
2947 p->function_maybepermanent_obstack);
2948
2949 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
2950 label_rtx (exp), p->forced_labels);
2951 pop_obstacks ();
2952 }
2953 else if (modifier == EXPAND_INITIALIZER)
2954 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
2955 label_rtx (exp), forced_labels);
2956 return gen_rtx (MEM, FUNCTION_MODE,
2957 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
2958 }
2959
2960 case PARM_DECL:
2961 if (DECL_RTL (exp) == 0)
2962 {
2963 error_with_decl (exp, "prior parameter's size depends on `%s'");
2964	  return CONST0_RTX (mode);
2965 }
2966
2967 case FUNCTION_DECL:
2968 case VAR_DECL:
2969 case RESULT_DECL:
2970 if (DECL_RTL (exp) == 0)
2971 abort ();
2972 /* Ensure variable marked as used
2973 even if it doesn't go through a parser. */
2974 TREE_USED (exp) = 1;
2975 /* Handle variables inherited from containing functions. */
2976 context = decl_function_context (exp);
2977
2978 /* We treat inline_function_decl as an alias for the current function
2979 because that is the inline function whose vars, types, etc.
2980 are being merged into the current function.
2981 See expand_inline_function. */
2982 if (context != 0 && context != current_function_decl
2983 && context != inline_function_decl
2984 /* If var is static, we don't need a static chain to access it. */
2985 && ! (GET_CODE (DECL_RTL (exp)) == MEM
2986 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
2987 {
2988 rtx addr;
2989
2990 /* Mark as non-local and addressable. */
2991 TREE_NONLOCAL (exp) = 1;
2992 mark_addressable (exp);
2993 if (GET_CODE (DECL_RTL (exp)) != MEM)
2994 abort ();
2995 addr = XEXP (DECL_RTL (exp), 0);
2996 if (GET_CODE (addr) == MEM)
2997 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
2998 else
2999 addr = fix_lexical_addr (addr, exp);
3000 return change_address (DECL_RTL (exp), mode, addr);
3001 }
3002
3003 /* This is the case of an array whose size is to be determined
3004 from its initializer, while the initializer is still being parsed.
3005 See expand_decl. */
3006 if (GET_CODE (DECL_RTL (exp)) == MEM
3007 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3008 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3009 XEXP (DECL_RTL (exp), 0));
3010 if (GET_CODE (DECL_RTL (exp)) == MEM
3011 && modifier != EXPAND_CONST_ADDRESS
3012 && modifier != EXPAND_SUM
3013 && modifier != EXPAND_INITIALIZER)
3014 {
3015 /* DECL_RTL probably contains a constant address.
3016 On RISC machines where a constant address isn't valid,
3017 make some insns to get that address into a register. */
3018 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3019 || (flag_force_addr
3020 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3021 return change_address (DECL_RTL (exp), VOIDmode,
3022 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3023 }
3024 return DECL_RTL (exp);
3025
3026 case INTEGER_CST:
3027 return immed_double_const (TREE_INT_CST_LOW (exp),
3028 TREE_INT_CST_HIGH (exp),
3029 mode);
3030
3031 case CONST_DECL:
3032 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3033
3034 case REAL_CST:
3035 /* If optimized, generate immediate CONST_DOUBLE
3036 which will be turned into memory by reload if necessary.
3037
3038 We used to force a register so that loop.c could see it. But
3039 this does not allow gen_* patterns to perform optimizations with
3040 the constants. It also produces two insns in cases like "x = 1.0;".
3041 On most machines, floating-point constants are not permitted in
3042 many insns, so we'd end up copying it to a register in any case.
3043
3044 Now, we do the copying in expand_binop, if appropriate. */
3045 return immed_real_const (exp);
3046
3047 case COMPLEX_CST:
3048 case STRING_CST:
3049 if (! TREE_CST_RTL (exp))
3050 output_constant_def (exp);
3051
3052 /* TREE_CST_RTL probably contains a constant address.
3053 On RISC machines where a constant address isn't valid,
3054 make some insns to get that address into a register. */
3055 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3056 && modifier != EXPAND_CONST_ADDRESS
3057 && modifier != EXPAND_INITIALIZER
3058 && modifier != EXPAND_SUM
3059 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3060 return change_address (TREE_CST_RTL (exp), VOIDmode,
3061 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3062 return TREE_CST_RTL (exp);
3063
3064 case SAVE_EXPR:
3065 context = decl_function_context (exp);
3066 /* We treat inline_function_decl as an alias for the current function
3067 because that is the inline function whose vars, types, etc.
3068 are being merged into the current function.
3069 See expand_inline_function. */
3070 if (context == current_function_decl || context == inline_function_decl)
3071 context = 0;
3072
3073 /* If this is non-local, handle it. */
3074 if (context)
3075 {
3076 temp = SAVE_EXPR_RTL (exp);
3077 if (temp && GET_CODE (temp) == REG)
3078 {
3079 put_var_into_stack (exp);
3080 temp = SAVE_EXPR_RTL (exp);
3081 }
3082 if (temp == 0 || GET_CODE (temp) != MEM)
3083 abort ();
3084 return change_address (temp, mode,
3085 fix_lexical_addr (XEXP (temp, 0), exp));
3086 }
3087 if (SAVE_EXPR_RTL (exp) == 0)
3088 {
3089 if (mode == BLKmode)
3090 temp
3091 = assign_stack_temp (mode,
3092 int_size_in_bytes (TREE_TYPE (exp)), 0);
3093 else
3094 temp = gen_reg_rtx (mode);
3095 SAVE_EXPR_RTL (exp) = temp;
3096 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3097 if (!optimize && GET_CODE (temp) == REG)
3098 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3099 save_expr_regs);
3100 }
3101 return SAVE_EXPR_RTL (exp);
3102
3103 case EXIT_EXPR:
3104 /* Exit the current loop if the body-expression is true. */
3105 {
3106 rtx label = gen_label_rtx ();
3107 do_jump (TREE_OPERAND (exp, 0), label, 0);
3108 expand_exit_loop (0);
3109 emit_label (label);
3110 }
3111 return const0_rtx;
3112
3113 case LOOP_EXPR:
3114 expand_start_loop (1);
3115 expand_expr_stmt (TREE_OPERAND (exp, 0));
3116 expand_end_loop ();
3117
3118 return const0_rtx;
3119
3120 case BIND_EXPR:
3121 {
3122 tree vars = TREE_OPERAND (exp, 0);
3123 int vars_need_expansion = 0;
3124
3125 /* Need to open a binding contour here because
3126	   if there are any cleanups they must be contained here.  */
3127 expand_start_bindings (0);
3128
3129 /* Mark the corresponding BLOCK for output. */
3130 if (TREE_OPERAND (exp, 2) != 0)
3131 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
3132
3133 /* If VARS have not yet been expanded, expand them now. */
3134 while (vars)
3135 {
3136 if (DECL_RTL (vars) == 0)
3137 {
3138 vars_need_expansion = 1;
3139 expand_decl (vars);
3140 }
3141 expand_decl_init (vars);
3142 vars = TREE_CHAIN (vars);
3143 }
3144
3145 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3146
3147 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3148
3149 return temp;
3150 }
3151
3152 case RTL_EXPR:
3153 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3154 abort ();
3155 emit_insns (RTL_EXPR_SEQUENCE (exp));
3156 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3157 return RTL_EXPR_RTL (exp);
3158
3159 case CONSTRUCTOR:
3160 /* All elts simple constants => refer to a constant in memory. But
3161 if this is a non-BLKmode mode, let it store a field at a time
3162 since that should make a CONST_INT or CONST_DOUBLE when we
3163 fold. */
3164 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3165 {
3166 rtx constructor = output_constant_def (exp);
3167 if (modifier != EXPAND_CONST_ADDRESS
3168 && modifier != EXPAND_INITIALIZER
3169 && modifier != EXPAND_SUM
3170 && !memory_address_p (GET_MODE (constructor),
3171 XEXP (constructor, 0)))
3172 constructor = change_address (constructor, VOIDmode,
3173 XEXP (constructor, 0));
3174 return constructor;
3175 }
3176
3177 if (ignore)
3178 {
3179 tree elt;
3180 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3181 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3182 return const0_rtx;
3183 }
3184 else
3185 {
3186 if (target == 0 || ! safe_from_p (target, exp))
3187 {
3188 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3189 target = gen_reg_rtx (mode);
3190 else
3191 {
3192 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3193 if (target)
3194 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3195 target = safe_target;
3196 }
3197 }
3198 store_constructor (exp, target);
3199 return target;
3200 }
3201
3202 case INDIRECT_REF:
3203 {
3204 tree exp1 = TREE_OPERAND (exp, 0);
3205 tree exp2;
3206
3207 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3208 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3209 This code has the same general effect as simply doing
3210 expand_expr on the save expr, except that the expression PTR
3211 is computed for use as a memory address. This means different
3212 code, suitable for indexing, may be generated. */
3213 if (TREE_CODE (exp1) == SAVE_EXPR
3214 && SAVE_EXPR_RTL (exp1) == 0
3215 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3216 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3217 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3218 {
3219 temp = expand_expr (TREE_OPERAND (exp1, 0), 0, VOIDmode, EXPAND_SUM);
3220 op0 = memory_address (mode, temp);
3221 op0 = copy_all_regs (op0);
3222 SAVE_EXPR_RTL (exp1) = op0;
3223 }
3224 else
3225 {
3226 op0 = expand_expr (exp1, 0, VOIDmode, EXPAND_SUM);
3227 op0 = memory_address (mode, op0);
3228 }
3229
3230 temp = gen_rtx (MEM, mode, op0);
3231 /* If address was computed by addition,
3232 mark this as an element of an aggregate. */
3233 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3234 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3235 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3236 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3237 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3238 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3239 || (TREE_CODE (exp1) == ADDR_EXPR
3240 && (exp2 = TREE_OPERAND (exp1, 0))
3241 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3242 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3243 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3244 MEM_IN_STRUCT_P (temp) = 1;
3245 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3246#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3247	 a location is accessed through a pointer to const does not mean
3248	 that the value there can never change.  */
3249      RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3250#endif
3251 return temp;
3252 }
3253
3254 case ARRAY_REF:
3255 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3256 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3257 {
3258 /* Nonconstant array index or nonconstant element size.
3259 Generate the tree for *(&array+index) and expand that,
3260 except do it in a language-independent way
3261 and don't complain about non-lvalue arrays.
3262 `mark_addressable' should already have been called
3263 for any array for which this case will be reached. */
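
	  /* For illustration (assuming a 4-byte int): the hypothetical
	     reference `a[i]' below, with a variable index I, is
	     rewritten as `*(&a + i * 4)' by the tree built here.  */
#if 0 /* illustrative example only */
	  int a[16];
	  int f (int i) { return a[i]; }
#endif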
3264
3265 /* Don't forget the const or volatile flag from the array element. */
3266 tree variant_type = build_type_variant (type,
3267 TREE_READONLY (exp),
3268 TREE_THIS_VOLATILE (exp));
3269 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3270 TREE_OPERAND (exp, 0));
3271 tree index = TREE_OPERAND (exp, 1);
3272 tree elt;
3273
3274 /* Convert the integer argument to a type the same size as a pointer
3275 so the multiply won't overflow spuriously. */
3276 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3277 index = convert (type_for_size (POINTER_SIZE, 0), index);
3278
3279 /* Don't think the address has side effects
3280 just because the array does.
3281 (In some cases the address might have side effects,
3282 and we fail to record that fact here. However, it should not
3283 matter, since expand_expr should not care.) */
3284 TREE_SIDE_EFFECTS (array_adr) = 0;
3285
3286 elt = build1 (INDIRECT_REF, type,
3287 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3288 array_adr,
3289 fold (build (MULT_EXPR,
3290 TYPE_POINTER_TO (variant_type),
3291 index, size_in_bytes (type))))));
3292
3293 /* Volatility, etc., of new expression is same as old expression. */
3294 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3295 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3296 TREE_READONLY (elt) = TREE_READONLY (exp);
3297
3298 return expand_expr (elt, target, tmode, modifier);
3299 }
3300
3301 /* Fold an expression like: "foo"[2].
3302 This is not done in fold so it won't happen inside &. */
3303 {
3304 int i;
3305 tree arg0 = TREE_OPERAND (exp, 0);
3306 tree arg1 = TREE_OPERAND (exp, 1);
3307
3308 if (TREE_CODE (arg0) == STRING_CST
3309 && TREE_CODE (arg1) == INTEGER_CST
3310 && !TREE_INT_CST_HIGH (arg1)
3311 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3312 {
3313 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3314 {
3315 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3316 TREE_TYPE (exp) = integer_type_node;
3317 return expand_expr (exp, target, tmode, modifier);
3318 }
3319 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3320 {
3321 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3322 TREE_TYPE (exp) = integer_type_node;
3323 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3324 }
3325 }
3326 }
3327
3328 /* If this is a constant index into a constant array,
3329 just get the value from the array. Handle both the cases when
3330 we have an explicit constructor and when our operand is a variable
3331 that was declared const. */
3332
3333 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3334 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3335 {
3336 tree index = fold (TREE_OPERAND (exp, 1));
3337 if (TREE_CODE (index) == INTEGER_CST
3338 && TREE_INT_CST_HIGH (index) == 0)
3339 {
3340 int i = TREE_INT_CST_LOW (index);
3341 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3342
3343 while (elem && i--)
3344 elem = TREE_CHAIN (elem);
3345 if (elem)
3346 return expand_expr (fold (TREE_VALUE (elem)), target,
3347 tmode, modifier);
3348 }
3349 }
3350
3351 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3352 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3353 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3354 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3355 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3356 && optimize >= 1
3357 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3358 != ERROR_MARK))
3359 {
3360 tree index = fold (TREE_OPERAND (exp, 1));
3361 if (TREE_CODE (index) == INTEGER_CST
3362 && TREE_INT_CST_HIGH (index) == 0)
3363 {
3364 int i = TREE_INT_CST_LOW (index);
3365	    tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3366
3367 if (TREE_CODE (init) == CONSTRUCTOR)
3368 {
3369 tree elem = CONSTRUCTOR_ELTS (init);
3370
3371 while (elem && i--)
3372 elem = TREE_CHAIN (elem);
3373 if (elem)
3374 return expand_expr (fold (TREE_VALUE (elem)), target,
3375 tmode, modifier);
3376 }
3377 else if (TREE_CODE (init) == STRING_CST
3378 && i < TREE_STRING_LENGTH (init))
3379 {
3380 temp = gen_rtx (CONST_INT, VOIDmode,
3381 TREE_STRING_POINTER (init)[i]);
3382 return convert_to_mode (mode, temp, 0);
3383 }
3384 }
3385 }
3386 /* Treat array-ref with constant index as a component-ref. */
3387
3388 case COMPONENT_REF:
3389 case BIT_FIELD_REF:
3390 /* If the operand is a CONSTRUCTOR, we can just extract the
3391 appropriate field if it is present. */
3392 if (code != ARRAY_REF
3393 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3394 {
3395 tree elt;
3396
3397 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3398 elt = TREE_CHAIN (elt))
3399 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3400 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3401 }
3402
3403 {
3404 enum machine_mode mode1;
3405 int bitsize;
3406 int bitpos;
3407 int volatilep = 0;
3408 tree tem = get_inner_reference (exp, &bitsize, &bitpos,
3409 &mode1, &unsignedp, &volatilep);
3410
3411 /* In some cases, we will be offsetting OP0's address by a constant.
3412 So get it as a sum, if possible. If we will be using it
3413 directly in an insn, we validate it. */
3414 op0 = expand_expr (tem, 0, VOIDmode, EXPAND_SUM);
3415
3416 /* If this is a constant, put it into a register if it is a
3417	   legitimate constant and memory if it isn't.  */
3418 if (CONSTANT_P (op0))
3419 {
3420 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3421 if (LEGITIMATE_CONSTANT_P (op0))
3422 op0 = force_reg (mode, op0);
3423 else
3424 op0 = validize_mem (force_const_mem (mode, op0));
3425 }
3426
3427 /* Don't forget about volatility even if this is a bitfield. */
3428 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3429 {
3430 op0 = copy_rtx (op0);
3431 MEM_VOLATILE_P (op0) = 1;
3432 }
3433
3434 if (mode1 == VOIDmode
3435 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3436 {
3437 /* In cases where an aligned union has an unaligned object
3438 as a field, we might be extracting a BLKmode value from
3439 an integer-mode (e.g., SImode) object. Handle this case
3440 by doing the extract into an object as wide as the field
3441 (which we know to be the width of a basic mode), then
3442 storing into memory, and changing the mode to BLKmode. */
3443 enum machine_mode ext_mode = mode;
3444
3445 if (ext_mode == BLKmode)
3446 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3447
3448 if (ext_mode == BLKmode)
3449 abort ();
3450
3451 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3452 unsignedp, target, ext_mode, ext_mode,
3453 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3454 int_size_in_bytes (TREE_TYPE (tem)));
3455 if (mode == BLKmode)
3456 {
3457 rtx new = assign_stack_temp (ext_mode,
3458 bitsize / BITS_PER_UNIT, 0);
3459
3460 emit_move_insn (new, op0);
3461 op0 = copy_rtx (new);
3462 PUT_MODE (op0, BLKmode);
3463 }
3464
3465 return op0;
3466 }
3467
3468 /* Get a reference to just this component. */
3469 if (modifier == EXPAND_CONST_ADDRESS
3470 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3471 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3472 (bitpos / BITS_PER_UNIT)));
3473 else
3474 op0 = change_address (op0, mode1,
3475 plus_constant (XEXP (op0, 0),
3476 (bitpos / BITS_PER_UNIT)));
3477 MEM_IN_STRUCT_P (op0) = 1;
3478 MEM_VOLATILE_P (op0) |= volatilep;
3479 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3480 return op0;
3481 if (target == 0)
3482 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3483 convert_move (target, op0, unsignedp);
3484 return target;
3485 }
3486
3487 case OFFSET_REF:
3488 {
3489 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3490 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3491 op0 = expand_expr (addr, 0, VOIDmode, EXPAND_SUM);
3492 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3493 MEM_IN_STRUCT_P (temp) = 1;
3494 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3495#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3496 a location is accessed through a pointer to const does not mean
3497 that the value there can never change. */
3498 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3499#endif
3500 return temp;
3501 }
3502
3503 /* Intended for a reference to a buffer of a file-object in Pascal.
3504 But it's not certain that a special tree code will really be
3505 necessary for these. INDIRECT_REF might work for them. */
3506 case BUFFER_REF:
3507 abort ();
3508
3509 case WITH_CLEANUP_EXPR:
3510 if (RTL_EXPR_RTL (exp) == 0)
3511 {
3512 RTL_EXPR_RTL (exp)
3513 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
3514 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2), cleanups_this_call);
3515 /* That's it for this cleanup. */
3516 TREE_OPERAND (exp, 2) = 0;
3517 }
3518 return RTL_EXPR_RTL (exp);
3519
3520 case CALL_EXPR:
3521 /* Check for a built-in function. */
3522 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
3523 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
3524 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3525 return expand_builtin (exp, target, subtarget, tmode, ignore);
3526 /* If this call was expanded already by preexpand_calls,
3527 just return the result we got. */
3528 if (CALL_EXPR_RTL (exp) != 0)
3529 return CALL_EXPR_RTL (exp);
3530	return expand_call (exp, target, ignore);
3531
3532 case NON_LVALUE_EXPR:
3533 case NOP_EXPR:
3534 case CONVERT_EXPR:
3535 case REFERENCE_EXPR:
3536 if (TREE_CODE (type) == VOID_TYPE || ignore)
3537 {
3538 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3539 return const0_rtx;
3540 }
3541 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
3542 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
3543 if (TREE_CODE (type) == UNION_TYPE)
3544 {
3545 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
3546 if (target == 0)
3547 {
3548 if (mode == BLKmode)
3549 {
3550 if (TYPE_SIZE (type) == 0
3551 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3552 abort ();
3553 target = assign_stack_temp (BLKmode,
3554 (TREE_INT_CST_LOW (TYPE_SIZE (type))
3555 + BITS_PER_UNIT - 1)
3556 / BITS_PER_UNIT, 0);
3557 }
3558 else
3559 target = gen_reg_rtx (mode);
3560 }
3561 if (GET_CODE (target) == MEM)
3562 /* Store data into beginning of memory target. */
3563 store_expr (TREE_OPERAND (exp, 0),
3564 change_address (target, TYPE_MODE (valtype), 0), 0);
3565 else if (GET_CODE (target) == REG)
3566 /* Store this field into a union of the proper type. */
3567 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
3568 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
3569 VOIDmode, 0, 1,
3570 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
3571 else
3572 abort ();
3573
3574 /* Return the entire union. */
3575 return target;
3576 }
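      /* A sketch of the source construct the UNION_TYPE code above
	 handles -- the GNU "cast to union" extension; the names here
	 are illustrative only:

	     union u { int i; double d; };
	     int x = 42;
	     union u v = (union u) x;     -- stores X into the I field

	 The union is built in a register or a stack temporary and the
	 operand is stored at its beginning.  */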
3577 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, mode, 0);
3578 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
3579 return op0;
3580 if (flag_force_mem && GET_CODE (op0) == MEM)
3581 op0 = copy_to_reg (op0);
3582
3583 if (target == 0)
3584 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3585 else
3586 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3587 return target;
3588
3589 case PLUS_EXPR:
3590 /* We come here from MINUS_EXPR when the second operand is a constant. */
3591 plus_expr:
3592 this_optab = add_optab;
3593
3594 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
3595 something else, make sure we add the register to the constant and
3596 then to the other thing. This case can occur during strength
3597 reduction and doing it this way will produce better code if the
3598 frame pointer or argument pointer is eliminated.
3599
3600 fold-const.c will ensure that the constant is always in the inner
3601 PLUS_EXPR, so the only case we need to do anything about is if
3602 sp, ap, or fp is our second argument, in which case we must swap
3603 the innermost first argument and our second argument. */
3604
3605 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3606 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
3607 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
3608 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
3609 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
3610 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
3611 {
3612 tree t = TREE_OPERAND (exp, 1);
3613
3614 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3615 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
3616 }
3617
3618 /* If the result is to be Pmode and we are adding an integer to
3619 something, we might be forming a constant. So try to use
3620 plus_constant. If it produces a sum and we can't accept it,
3621 use force_operand. This allows P = &ARR[const] to generate
3622 efficient code on machines where a SYMBOL_REF is not a valid
3623 address.
3624
3625 If this is an EXPAND_SUM call, always return the sum. */
3626 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
3627 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3628 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3629 || mode == Pmode))
3630 {
3631 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
3632 EXPAND_SUM);
3633 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
3634 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3635 op1 = force_operand (op1, target);
3636 return op1;
3637 }
3638
3639 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3640 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
3641 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
3642 || mode == Pmode))
3643 {
3644 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
3645 EXPAND_SUM);
3646 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
3647 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3648 op0 = force_operand (op0, target);
3649 return op0;
3650 }
3651
3652 /* No sense saving up arithmetic to be done
3653 if it's all in the wrong mode to form part of an address.
3654 And force_operand won't know whether to sign-extend or
3655 zero-extend. */
3656 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3657 || mode != Pmode) goto binop;
3658
3659 preexpand_calls (exp);
3660 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3661 subtarget = 0;
3662
3663 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
3664 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3665
3666 /* Make sure any term that's a sum with a constant comes last. */
3667 if (GET_CODE (op0) == PLUS
3668 && CONSTANT_P (XEXP (op0, 1)))
3669 {
3670 temp = op0;
3671 op0 = op1;
3672 op1 = temp;
3673 }
3674 /* If adding to a sum including a constant,
3675 associate it to put the constant outside. */
3676 if (GET_CODE (op1) == PLUS
3677 && CONSTANT_P (XEXP (op1, 1)))
3678 {
3679 rtx constant_term = const0_rtx;
3680
3681 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
3682 if (temp != 0)
3683 op0 = temp;
3684 else
3685 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
3686
3687 /* Let's also eliminate constants from op0 if possible. */
3688 op0 = eliminate_constant_term (op0, &constant_term);
3689
3690 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3691 their sum should be a constant. Form it into OP1, since the
3692 result we want will then be OP0 + OP1. */
3693
3694 temp = simplify_binary_operation (PLUS, mode, constant_term,
3695 XEXP (op1, 1));
3696 if (temp != 0)
3697 op1 = temp;
3698 else
3699 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
3700 }
3701
3702 /* Put a constant term last and put a multiplication first. */
3703 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
3704 temp = op1, op1 = op0, op0 = temp;
3705
3706 temp = simplify_binary_operation (PLUS, mode, op0, op1);
3707 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
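      /* An illustrative case for the constant-forming path above; the
	 file-scope names are hypothetical and ints are assumed 4 bytes:

	     static int arr[10];
	     int *p = &arr[3];

	 The address expands to (plus (symbol_ref arr) (const_int 12)),
	 which plus_constant folds into a single CONST rtx usable
	 directly in an initializer or an address, with no run-time add.  */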
3708
3709 case MINUS_EXPR:
3710 /* Handle difference of two symbolic constants,
3711 for the sake of an initializer. */
3712 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3713 && really_constant_p (TREE_OPERAND (exp, 0))
3714 && really_constant_p (TREE_OPERAND (exp, 1)))
3715 {
3716 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, modifier);
3717 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, modifier);
3718 return gen_rtx (MINUS, mode, op0, op1);
3719 }
3720 /* Convert A - const to A + (-const). */
3721 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3722 {
3723 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
3724 fold (build1 (NEGATE_EXPR, type,
3725 TREE_OPERAND (exp, 1))));
3726 goto plus_expr;
3727 }
3728 this_optab = sub_optab;
3729 goto binop;
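      /* E.g. "a - 5" is rewritten above as "a + (-5)", so the
	 PLUS_EXPR constant machinery (plus_constant and address
	 formation) covers constant subtraction as well.  */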
3730
3731 case MULT_EXPR:
3732 preexpand_calls (exp);
3733 /* If first operand is constant, swap them.
3734 Thus the following special case checks need only
3735 check the second operand. */
3736 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
3737 {
3738 register tree t1 = TREE_OPERAND (exp, 0);
3739 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
3740 TREE_OPERAND (exp, 1) = t1;
3741 }
3742
3743 /* Attempt to return something suitable for generating an
3744 indexed address, for machines that support that. */
3745
3746 if (modifier == EXPAND_SUM && mode == Pmode
3747 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3748 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT)
3749 {
3750 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
3751
3752 /* Apply distributive law if OP0 is x+c. */
3753 if (GET_CODE (op0) == PLUS
3754 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
3755 return gen_rtx (PLUS, mode,
3756 gen_rtx (MULT, mode, XEXP (op0, 0),
3757 gen_rtx (CONST_INT, VOIDmode,
3758 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
3759 gen_rtx (CONST_INT, VOIDmode,
3760 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3761 * INTVAL (XEXP (op0, 1)))));
3762
3763 if (GET_CODE (op0) != REG)
3764 op0 = force_operand (op0, 0);
3765 if (GET_CODE (op0) != REG)
3766 op0 = copy_to_mode_reg (mode, op0);
3767
3768 return gen_rtx (MULT, mode, op0,
3769 gen_rtx (CONST_INT, VOIDmode,
3770 TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
3771 }
3772
3773 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3774 subtarget = 0;
3775
3776 /* Check for multiplying things that have been extended
3777 from a narrower type. If this machine supports multiplying
3778 in that narrower type with a result in the desired type,
3779 do it that way, and avoid the explicit type-conversion. */
3780 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
3781 && TREE_CODE (type) == INTEGER_TYPE
3782 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3783 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
3784 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3785 && int_fits_type_p (TREE_OPERAND (exp, 1),
3786 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3787 /* Don't use a widening multiply if a shift will do. */
3788 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
3789 > HOST_BITS_PER_INT)
3790 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
3791 ||
3792 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
3793 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3794 ==
3795 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
3796 /* If both operands are extended, they must either both
3797 be zero-extended or both be sign-extended. */
3798 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
3799 ==
3800 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
3801 {
3802 enum machine_mode innermode
3803 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
3804 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
3805 ? umul_widen_optab : smul_widen_optab);
3806 if (mode == GET_MODE_WIDER_MODE (innermode)
3807 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3808 {
3809 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
3810 0, VOIDmode, 0);
3811 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
3812 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3813 else
3814 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
3815 0, VOIDmode, 0);
3816 goto binop2;
3817 }
3818 }
3819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3820 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3821 return expand_mult (mode, op0, op1, target, unsignedp);
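      /* A sketch of the widening-multiply case above, assuming a
	 machine with a 16x16->32 multiply pattern (a mulhisi3-style
	 insn behind smul_widen_optab):

	     short a, b;
	     int prod = (int) a * (int) b;

	 Both operands are extensions from HImode with the same
	 signedness, so one widening multiply computes the SImode
	 product without extending the operands first.  */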
3822
3823 case TRUNC_DIV_EXPR:
3824 case FLOOR_DIV_EXPR:
3825 case CEIL_DIV_EXPR:
3826 case ROUND_DIV_EXPR:
3827 case EXACT_DIV_EXPR:
3828 preexpand_calls (exp);
3829 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3830 subtarget = 0;
3831 /* Possible optimization: compute the dividend with EXPAND_SUM
3832 then if the divisor is constant can optimize the case
3833 where some terms of the dividend have coeffs divisible by it. */
3834 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3835 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3836 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
3837
3838 case RDIV_EXPR:
3839 this_optab = flodiv_optab;
3840 goto binop;
3841
3842 case TRUNC_MOD_EXPR:
3843 case FLOOR_MOD_EXPR:
3844 case CEIL_MOD_EXPR:
3845 case ROUND_MOD_EXPR:
3846 preexpand_calls (exp);
3847 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
3848 subtarget = 0;
3849 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3850 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3851 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
3852
3853 case FIX_ROUND_EXPR:
3854 case FIX_FLOOR_EXPR:
3855 case FIX_CEIL_EXPR:
3856 abort (); /* Not used for C. */
3857
3858 case FIX_TRUNC_EXPR:
3859 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3860 if (target == 0)
3861 target = gen_reg_rtx (mode);
3862 expand_fix (target, op0, unsignedp);
3863 return target;
3864
3865 case FLOAT_EXPR:
3866 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
3867 if (target == 0)
3868 target = gen_reg_rtx (mode);
3869 /* expand_float can't figure out what to do if FROM has VOIDmode.
3870 So give it the correct mode. With -O, cse will optimize this. */
3871 if (GET_MODE (op0) == VOIDmode)
3872 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
3873 op0);
3874 expand_float (target, op0,
3875 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
3876 return target;
3877
3878 case NEGATE_EXPR:
3879 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3880 temp = expand_unop (mode, neg_optab, op0, target, 0);
3881 if (temp == 0)
3882 abort ();
3883 return temp;
3884
3885 case ABS_EXPR:
3886 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3887
3888 /* Unsigned abs is simply the operand. Testing here means we don't
3889 risk generating incorrect code below. */
3890 if (TREE_UNSIGNED (type))
3891 return op0;
3892
3893 /* First try to do it with a special abs instruction. */
3894 temp = expand_unop (mode, abs_optab, op0, target, 0);
3895 if (temp != 0)
3896 return temp;
3897
3898 /* If this machine has expensive jumps, we can do integer absolute
3899 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
3900 where W is the width of MODE. */
3901
3902 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
3903 {
3904 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
3905 size_int (GET_MODE_BITSIZE (mode) - 1),
3906 0, 0);
3907
3908 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
3909 OPTAB_LIB_WIDEN);
3910 if (temp != 0)
3911 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
3912 OPTAB_LIB_WIDEN);
3913
3914 if (temp != 0)
3915 return temp;
3916 }
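      /* Worked example of the identity above, with W == 32 and
	 x == -5:

	     extended = x >> 31          -- arithmetic shift, so -1
	     (extended ^ x) - extended   -- (-1 ^ -5) - (-1) == 4 + 1 == 5

	 and for x >= 0, EXTENDED is 0, so (0 ^ x) - 0 == x.  */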
3917
3918 /* If that does not win, use conditional jump and negate. */
3919 target = original_target;
3920 temp = gen_label_rtx ();
3921 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
3922 || (GET_CODE (target) == REG
3923 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3924 target = gen_reg_rtx (mode);
3925 emit_move_insn (target, op0);
3926 emit_cmp_insn (target,
3927 expand_expr (convert (type, integer_zero_node),
3928 0, VOIDmode, 0),
3929 GE, 0, mode, 0, 0);
3930 NO_DEFER_POP;
3931 emit_jump_insn (gen_bge (temp));
3932 op0 = expand_unop (mode, neg_optab, target, target, 0);
3933 if (op0 != target)
3934 emit_move_insn (target, op0);
3935 emit_label (temp);
3936 OK_DEFER_POP;
3937 return target;
3938
3939 case MAX_EXPR:
3940 case MIN_EXPR:
3941 target = original_target;
3942 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
3943 || (GET_CODE (target) == REG
3944 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3945 target = gen_reg_rtx (mode);
3946 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
3947 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
3948
3949 /* First try to do it with a special MIN or MAX instruction.
3950 If that does not win, use a conditional jump to select the proper
3951 value. */
3952 this_optab = (TREE_UNSIGNED (type)
3953 ? (code == MIN_EXPR ? umin_optab : umax_optab)
3954 : (code == MIN_EXPR ? smin_optab : smax_optab));
3955
3956 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
3957 OPTAB_WIDEN);
3958 if (temp != 0)
3959 return temp;
3960
3961 if (target != op0)
3962 emit_move_insn (target, op0);
3963 op0 = gen_label_rtx ();
3964 if (code == MAX_EXPR)
3965 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3966 ? compare_from_rtx (target, op1, GEU, 1, mode, 0, 0)
3967 : compare_from_rtx (target, op1, GE, 0, mode, 0, 0));
3968 else
3969 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
3970 ? compare_from_rtx (target, op1, LEU, 1, mode, 0, 0)
3971 : compare_from_rtx (target, op1, LE, 0, mode, 0, 0));
3972 if (temp == const0_rtx)
3973 emit_move_insn (target, op1);
3974 else if (temp != const_true_rtx)
3975 {
3976 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
3977 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
3978 else
3979 abort ();
3980 emit_move_insn (target, op1);
3981 }
3982 emit_label (op0);
3983 return target;
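      /* Shape of the branching fallback above, shown for MAX_EXPR in
	 rough pseudo-C:

	     target = op0;
	     if (target >= op1)          -- GEU when unsigned
	       goto done;
	     target = op1;
	   done:

	 When compare_from_rtx folds the comparison to a constant, the
	 jump (or the second store) is omitted entirely.  */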
3984
3985/* ??? Can optimize when the operand of this is a bitwise operation,
3986 by using a different bitwise operation. */
3987 case BIT_NOT_EXPR:
3988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3989 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
3990 if (temp == 0)
3991 abort ();
3992 return temp;
3993
3994 case FFS_EXPR:
3995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
3996 temp = expand_unop (mode, ffs_optab, op0, target, 1);
3997 if (temp == 0)
3998 abort ();
3999 return temp;
4000
4001/* ??? Can optimize bitwise operations with one arg constant.
4002 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4003 and (a bitwise1 b) bitwise2 b (etc)
4004 but that is probably not worth while. */
4005
4006/* BIT_AND_EXPR is for bitwise anding.
4007 TRUTH_AND_EXPR is for anding two boolean values
4008 when we want in all cases to compute both of them.
4009 In general it is fastest to do TRUTH_AND_EXPR by
4010 computing both operands as actual zero-or-1 values
4011 and then bitwise anding. In cases where there cannot
4012 be any side effects, better code would be made by
4013 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4014 but the question is how to recognize those cases. */
4015
4016 case TRUTH_AND_EXPR:
4017 case BIT_AND_EXPR:
4018 this_optab = and_optab;
4019 goto binop;
4020
4021/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4022 case TRUTH_OR_EXPR:
4023 case BIT_IOR_EXPR:
4024 this_optab = ior_optab;
4025 goto binop;
4026
4027 case BIT_XOR_EXPR:
4028 this_optab = xor_optab;
4029 goto binop;
4030
4031 case LSHIFT_EXPR:
4032 case RSHIFT_EXPR:
4033 case LROTATE_EXPR:
4034 case RROTATE_EXPR:
4035 preexpand_calls (exp);
4036 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4037 subtarget = 0;
4038 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4039 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4040 unsignedp);
4041
4042/* Could determine the answer when only additive constants differ.
4043 Also, the addition of one can be handled by changing the condition. */
4044 case LT_EXPR:
4045 case LE_EXPR:
4046 case GT_EXPR:
4047 case GE_EXPR:
4048 case EQ_EXPR:
4049 case NE_EXPR:
4050 preexpand_calls (exp);
4051 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4052 if (temp != 0)
4053 return temp;
4054 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4055 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4056 && original_target
4057 && GET_CODE (original_target) == REG
4058 && (GET_MODE (original_target)
4059 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4060 {
4061 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4062 if (temp != original_target)
4063 temp = copy_to_reg (temp);
4064 op1 = gen_label_rtx ();
4065 emit_cmp_insn (temp, const0_rtx, EQ, 0,
4066 GET_MODE (temp), unsignedp, 0);
4067 emit_jump_insn (gen_beq (op1));
4068 emit_move_insn (temp, const1_rtx);
4069 emit_label (op1);
4070 return temp;
4071 }
4072 /* If no set-flag instruction, must generate a conditional
4073 store into a temporary variable. Drop through
4074 and handle this like && and ||. */
4075
4076 case TRUTH_ANDIF_EXPR:
4077 case TRUTH_ORIF_EXPR:
4078 if (target == 0 || ! safe_from_p (target, exp)
4079 /* Make sure we don't have a hard reg (such as function's return
4080 value) live across basic blocks, if not optimizing. */
4081 || (!optimize && GET_CODE (target) == REG
4082 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4083 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4084 emit_clr_insn (target);
4085 op1 = gen_label_rtx ();
4086 jumpifnot (exp, op1);
4087 emit_0_to_1_insn (target);
4088 emit_label (op1);
4089 return target;
4090
4091 case TRUTH_NOT_EXPR:
4092 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4093 /* The parser is careful to generate TRUTH_NOT_EXPR
4094 only with operands that are always zero or one. */
4095 temp = expand_binop (mode, xor_optab, op0,
4096 gen_rtx (CONST_INT, mode, 1),
4097 target, 1, OPTAB_LIB_WIDEN);
4098 if (temp == 0)
4099 abort ();
4100 return temp;
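      /* Since the operand is known to be 0 or 1, logical negation is
	 just an exclusive-or with 1:  !x == x ^ 1  (0 -> 1, 1 -> 0).  */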
4101
4102 case COMPOUND_EXPR:
4103 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4104 emit_queue ();
4105 return expand_expr (TREE_OPERAND (exp, 1),
4106 (ignore ? const0_rtx : target),
4107 VOIDmode, 0);
4108
4109 case COND_EXPR:
4110 {
4111 /* Note that COND_EXPRs whose type is a structure or union
4112 are required to be constructed to contain assignments of
4113 a temporary variable, so that we can evaluate them here
4114 for side effect only. If type is void, we must do likewise. */
4115
4116 /* If an arm of the branch requires a cleanup,
4117 only that cleanup is performed. */
4118
4119 tree singleton = 0;
4120 tree binary_op = 0, unary_op = 0;
4121 tree old_cleanups = cleanups_this_call;
4122 cleanups_this_call = 0;
4123
4124 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4125 convert it to our mode, if necessary. */
4126 if (integer_onep (TREE_OPERAND (exp, 1))
4127 && integer_zerop (TREE_OPERAND (exp, 2))
4128 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4129 {
4130 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4131 if (GET_MODE (op0) == mode)
4132 return op0;
4133 if (target == 0)
4134 target = gen_reg_rtx (mode);
4135 convert_move (target, op0, unsignedp);
4136 return target;
4137 }
4138
4139 /* If we are not to produce a result, we have no target. Otherwise,
4140 if a target was specified use it; it will not be used as an
4141 intermediate target unless it is safe. If no target, use a
4142 temporary. */
4143
4144 if (mode == VOIDmode || ignore)
4145 temp = 0;
4146 else if (original_target
4147 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4148 temp = original_target;
4149 else if (mode == BLKmode)
4150 {
4151 if (TYPE_SIZE (type) == 0
4152 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4153 abort ();
4154 temp = assign_stack_temp (BLKmode,
4155 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4156 + BITS_PER_UNIT - 1)
4157 / BITS_PER_UNIT, 0);
4158 }
4159 else
4160 temp = gen_reg_rtx (mode);
4161
4162 /* Check for X ? A + B : A. If we have this, we can copy
4163 A to the output and conditionally add B. Similarly for unary
4164 operations. Don't do this if X has side-effects because
4165 those side effects might affect A or B and the "?" operation is
4166 a sequence point in ANSI. (We test for side effects later.) */
4167
4168 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4169 && operand_equal_p (TREE_OPERAND (exp, 2),
4170 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4171 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4172 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4173 && operand_equal_p (TREE_OPERAND (exp, 1),
4174 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4175 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4176 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4177 && operand_equal_p (TREE_OPERAND (exp, 2),
4178 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4179 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4180 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4181 && operand_equal_p (TREE_OPERAND (exp, 1),
4182 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4183 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4184
4185 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4186 operation, do this as A + (X != 0). Similarly for other simple
4187 binary operators. */
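	/* E.g. (with hypothetical variables) "x > y ? n + 1 : n"
	   becomes "n + (x > y)" when the comparison can be computed
	   by a store-flag instruction, replacing a conditional
	   branch with straight-line arithmetic.  */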
4188 if (singleton && binary_op
4189 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4190 && (TREE_CODE (binary_op) == PLUS_EXPR
4191 || TREE_CODE (binary_op) == MINUS_EXPR
4192 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4193 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4194 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4195 && integer_onep (TREE_OPERAND (binary_op, 1))
4196 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4197 {
4198 rtx result;
4199 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4200 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4201 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4202 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4203 : and_optab);
4204
4205 /* If we had X ? A : A + 1, do this as A + (X == 0).
4206
4207 We have to invert the truth value here and then put it
4208 back later if do_store_flag fails. We cannot simply copy
4209 TREE_OPERAND (exp, 0) to another variable and modify that
4210 because invert_truthvalue can modify the tree pointed to
4211 by its argument. */
4212 if (singleton == TREE_OPERAND (exp, 1))
4213 TREE_OPERAND (exp, 0)
4214 = invert_truthvalue (TREE_OPERAND (exp, 0));
4215
4216 result = do_store_flag (TREE_OPERAND (exp, 0),
4217 safe_from_p (temp, singleton) ? temp : 0,
4218 mode, BRANCH_COST <= 1);
4219
4220 if (result)
4221 {
4222 op1 = expand_expr (singleton, 0, VOIDmode, 0);
4223 return expand_binop (mode, boptab, op1, result, temp,
4224 unsignedp, OPTAB_LIB_WIDEN);
4225 }
4226 else if (singleton == TREE_OPERAND (exp, 1))
4227 TREE_OPERAND (exp, 0)
4228 = invert_truthvalue (TREE_OPERAND (exp, 0));
4229 }
4230
4231 NO_DEFER_POP;
4232 op0 = gen_label_rtx ();
4233
4234 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4235 {
4236 if (temp != 0)
4237 {
4238 /* If the target conflicts with the other operand of the
4239 binary op, we can't use it. Also, we can't use the target
4240 if it is a hard register, because evaluating the condition
4241 might clobber it. */
4242 if ((binary_op
4243 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4244 || (GET_CODE (temp) == REG
4245 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4246 temp = gen_reg_rtx (mode);
4247 store_expr (singleton, temp, 0);
4248 }
4249 else
4250 expand_expr (singleton, ignore ? const1_rtx : 0, VOIDmode, 0);
4251 if (cleanups_this_call)
4252 {
4253 sorry ("aggregate value in COND_EXPR");
4254 cleanups_this_call = 0;
4255 }
4256 if (singleton == TREE_OPERAND (exp, 1))
4257 jumpif (TREE_OPERAND (exp, 0), op0);
4258 else
4259 jumpifnot (TREE_OPERAND (exp, 0), op0);
4260
4261 if (binary_op && temp == 0)
4262 /* Just touch the other operand. */
4263 expand_expr (TREE_OPERAND (binary_op, 1),
4264 ignore ? const0_rtx : 0, VOIDmode, 0);
4265 else if (binary_op)
4266 store_expr (build (TREE_CODE (binary_op), type,
4267 make_tree (type, temp),
4268 TREE_OPERAND (binary_op, 1)),
4269 temp, 0);
4270 else
4271 store_expr (build1 (TREE_CODE (unary_op), type,
4272 make_tree (type, temp)),
4273 temp, 0);
4274 op1 = op0;
4275 }
4276#if 0
4277 /* This is now done in jump.c and is better done there because it
4278 produces shorter register lifetimes. */
4279
4280 /* Check for both possibilities either constants or variables
4281 in registers (but not the same as the target!). If so, can
4282 save branches by assigning one, branching, and assigning the
4283 other. */
4284 else if (temp && GET_MODE (temp) != BLKmode
4285 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4286 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4287 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4288 && DECL_RTL (TREE_OPERAND (exp, 1))
4289 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4290 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4291 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4292 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4293 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4294 && DECL_RTL (TREE_OPERAND (exp, 2))
4295 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4296 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4297 {
4298 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4299 temp = gen_reg_rtx (mode);
4300 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4301 jumpifnot (TREE_OPERAND (exp, 0), op0);
4302 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4303 op1 = op0;
4304 }
4305#endif
4306 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4307 comparison operator. If we have one of these cases, set the
4308 output to A, branch on A (cse will merge these two references),
4309 then set the output to FOO. */
4310 else if (temp
4311 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4312 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4313 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4314 TREE_OPERAND (exp, 1), 0)
4315 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4316 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4317 {
4318 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4319 temp = gen_reg_rtx (mode);
4320 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4321 jumpif (TREE_OPERAND (exp, 0), op0);
4322 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4323 op1 = op0;
4324 }
4325 else if (temp
4326 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4327 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4328 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4329 TREE_OPERAND (exp, 2), 0)
4330 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4331 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4332 {
4333 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4334 temp = gen_reg_rtx (mode);
4335 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4336 jumpifnot (TREE_OPERAND (exp, 0), op0);
4337 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4338 op1 = op0;
4339 }
4340 else
4341 {
4342 op1 = gen_label_rtx ();
4343 jumpifnot (TREE_OPERAND (exp, 0), op0);
4344 if (temp != 0)
4345 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4346 else
4347 expand_expr (TREE_OPERAND (exp, 1), ignore ? const0_rtx : 0,
4348 VOIDmode, 0);
4349 if (cleanups_this_call)
4350 {
4351 sorry ("aggregate value in COND_EXPR");
4352 cleanups_this_call = 0;
4353 }
4354
4355 emit_queue ();
4356 emit_jump_insn (gen_jump (op1));
4357 emit_barrier ();
4358 emit_label (op0);
4359 if (temp != 0)
4360 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4361 else
4362 expand_expr (TREE_OPERAND (exp, 2), ignore ? const0_rtx : 0,
4363 VOIDmode, 0);
4364 }
4365
4366 if (cleanups_this_call)
4367 {
4368 sorry ("aggregate value in COND_EXPR");
4369 cleanups_this_call = 0;
4370 }
4371
4372 emit_queue ();
4373 emit_label (op1);
4374 OK_DEFER_POP;
4375 cleanups_this_call = old_cleanups;
4376 return temp;
4377 }
4378
4379 case TARGET_EXPR:
4380 {
4381 /* Something needs to be initialized, but we didn't know
4382 where that thing was when building the tree. For example,
4383 it could be the return value of a function, or a parameter
4384 	 to a function which is passed on the stack, or a temporary
4385 variable which must be passed by reference.
4386
4387 We guarantee that the expression will either be constructed
4388 or copied into our original target. */
4389
4390 tree slot = TREE_OPERAND (exp, 0);
4391
4392 if (TREE_CODE (slot) != VAR_DECL)
4393 abort ();
4394
4395 if (target == 0)
4396 {
4397 if (DECL_RTL (slot) != 0)
4398 target = DECL_RTL (slot);
4399 else
4400 {
4401 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4402 /* All temp slots at this level must not conflict. */
4403 preserve_temp_slots (target);
4404 DECL_RTL (slot) = target;
4405 }
4406
4407#if 0
4408 /* Since SLOT is not known to the called function
4409 to belong to its stack frame, we must build an explicit
4410 cleanup. This case occurs when we must build up a reference
4411 to pass the reference as an argument. In this case,
4412 it is very likely that such a reference need not be
4413 built here. */
4414
4415 if (TREE_OPERAND (exp, 2) == 0)
4416 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4417 if (TREE_OPERAND (exp, 2))
4418 cleanups_this_call = tree_cons (0, TREE_OPERAND (exp, 2),
4419 cleanups_this_call);
4420#endif
4421 }
4422 else
4423 {
4424 /* This case does occur, when expanding a parameter which
4425 needs to be constructed on the stack. The target
4426 is the actual stack address that we want to initialize.
4427 The function we call will perform the cleanup in this case. */
4428
4429 DECL_RTL (slot) = target;
4430 }
4431
4432 return expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4433 }
4434
4435 case INIT_EXPR:
4436 {
4437 tree lhs = TREE_OPERAND (exp, 0);
4438 tree rhs = TREE_OPERAND (exp, 1);
4439 tree noncopied_parts = 0;
4440 tree lhs_type = TREE_TYPE (lhs);
4441
4442 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4443 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4444 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4445 TYPE_NONCOPIED_PARTS (lhs_type));
4446 while (noncopied_parts != 0)
4447 {
4448 expand_assignment (TREE_VALUE (noncopied_parts),
4449 TREE_PURPOSE (noncopied_parts), 0, 0);
4450 noncopied_parts = TREE_CHAIN (noncopied_parts);
4451 }
4452 return temp;
4453 }
4454
4455 case MODIFY_EXPR:
4456 {
4457 /* If lhs is complex, expand calls in rhs before computing it.
4458 That's so we don't compute a pointer and save it over a call.
4459 If lhs is simple, compute it first so we can give it as a
4460 target if the rhs is just a call. This avoids an extra temp and copy
4461 and that prevents a partial-subsumption which makes bad code.
4462 Actually we could treat component_ref's of vars like vars. */
4463
4464 tree lhs = TREE_OPERAND (exp, 0);
4465 tree rhs = TREE_OPERAND (exp, 1);
4466 tree noncopied_parts = 0;
4467 tree lhs_type = TREE_TYPE (lhs);
4468
4469 temp = 0;
4470
4471 if (TREE_CODE (lhs) != VAR_DECL
4472 && TREE_CODE (lhs) != RESULT_DECL
4473 && TREE_CODE (lhs) != PARM_DECL)
4474 preexpand_calls (exp);
4475
4476 /* Check for |= or &= of a bitfield of size one into another bitfield
4477 of size 1. In this case, (unless we need the result of the
4478 assignment) we can do this more efficiently with a
4479 test followed by an assignment, if necessary.
4480
4481 ??? At this point, we can't get a BIT_FIELD_REF here. But if
4482 things change so we do, this code should be enhanced to
4483 support it. */
4484 if (ignore
4485 && TREE_CODE (lhs) == COMPONENT_REF
4486 && (TREE_CODE (rhs) == BIT_IOR_EXPR
4487 || TREE_CODE (rhs) == BIT_AND_EXPR)
4488 && TREE_OPERAND (rhs, 0) == lhs
4489 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
4490 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
4491 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
4492 {
4493 rtx label = gen_label_rtx ();
4494
4495 do_jump (TREE_OPERAND (rhs, 1),
4496 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
4497 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
4498 expand_assignment (lhs, convert (TREE_TYPE (rhs),
4499 (TREE_CODE (rhs) == BIT_IOR_EXPR
4500 ? integer_one_node
4501 : integer_zero_node)),
4502 0, 0);
4503 	      do_pending_stack_adjust ();
4504 emit_label (label);
4505 return const0_rtx;
4506 }
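	/* E.g. with one-bit fields (illustrative declarations):

	       struct s { unsigned a : 1; } x, y;
	       x.a |= y.a;        -- result of the assignment unused

	   expands as roughly "if (y.a) x.a = 1;" -- a jump on Y.A and
	   a plain store, instead of a read-modify-write of X.A.  */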
4507
4508 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
4509 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
4510 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
4511 TYPE_NONCOPIED_PARTS (lhs_type));
4512
4513 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4514 while (noncopied_parts != 0)
4515 {
4516 expand_assignment (TREE_PURPOSE (noncopied_parts),
4517 TREE_VALUE (noncopied_parts), 0, 0);
4518 noncopied_parts = TREE_CHAIN (noncopied_parts);
4519 }
4520 return temp;
4521 }
4522
4523 case PREINCREMENT_EXPR:
4524 case PREDECREMENT_EXPR:
4525 return expand_increment (exp, 0);
4526
4527 case POSTINCREMENT_EXPR:
4528 case POSTDECREMENT_EXPR:
4529 /* Faster to treat as pre-increment if result is not used. */
4530 return expand_increment (exp, ! ignore);
4531
4532 case ADDR_EXPR:
4533 /* Are we taking the address of a nested function? */
4534 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
4535 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
4536 {
4537 op0 = trampoline_address (TREE_OPERAND (exp, 0));
4538 op0 = force_operand (op0, target);
4539 }
4540 else
4541 {
4542 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode,
4543 (modifier == EXPAND_INITIALIZER
4544 ? modifier : EXPAND_CONST_ADDRESS));
4545 if (GET_CODE (op0) != MEM)
4546 abort ();
4547
4548 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4549 return XEXP (op0, 0);
4550 op0 = force_operand (XEXP (op0, 0), target);
4551 }
4552 if (flag_force_addr && GET_CODE (op0) != REG)
4553 return force_reg (Pmode, op0);
4554 return op0;
4555
4556 case ENTRY_VALUE_EXPR:
4557 abort ();
4558
4559 case ERROR_MARK:
4560 return const0_rtx;
4561
4562 default:
4563 return (*lang_expand_expr) (exp, target, tmode, modifier);
4564 }
4565
4566 /* Here to do an ordinary binary operator, generating an instruction
4567 from the optab already placed in `this_optab'. */
4568 binop:
4569 preexpand_calls (exp);
4570 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4571 subtarget = 0;
4572 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4573 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
4574 binop2:
4575 temp = expand_binop (mode, this_optab, op0, op1, target,
4576 unsignedp, OPTAB_LIB_WIDEN);
4577 if (temp == 0)
4578 abort ();
4579 return temp;
4580}
4581\f
4582/* Return the alignment in bits of EXP, a pointer valued expression.
4583 But don't return more than MAX_ALIGN no matter what.
4584 The alignment returned is, by default, the alignment of the thing that
4585 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
4586
4587 Otherwise, look at the expression to see if we can do better, i.e., if the
4588 expression is actually pointing at an object whose alignment is tighter. */
4589
4590static int
4591get_pointer_alignment (exp, max_align)
4592 tree exp;
4593 unsigned max_align;
4594{
4595 unsigned align, inner;
4596
4597 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4598 return 0;
4599
4600 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4601 align = MIN (align, max_align);
4602
4603 while (1)
4604 {
4605 switch (TREE_CODE (exp))
4606 {
4607 case NOP_EXPR:
4608 case CONVERT_EXPR:
4609 case NON_LVALUE_EXPR:
4610 exp = TREE_OPERAND (exp, 0);
4611 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
4612 return align;
4613 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
4614 inner = MIN (inner, max_align);
4615 align = MAX (align, inner);
4616 break;
4617
4618 case PLUS_EXPR:
4619 /* If sum of pointer + int, restrict our maximum alignment to that
4620 imposed by the integer. If not, we can't do any better than
4621 ALIGN. */
4622 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
4623 return align;
4624
4625 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
4626 & (max_align - 1))
4627 != 0)
4628 max_align >>= 1;
4629
4630 exp = TREE_OPERAND (exp, 0);
4631 break;
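	  /* Worked example: with MAX_ALIGN == 64 bits and a byte
	     offset of 2 (16 bits), the loop above shifts MAX_ALIGN
	     down 64 -> 32 -> 16 until (16 & (MAX_ALIGN - 1)) == 0,
	     so the sum can claim at most 16-bit alignment.  */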
4632
4633 case ADDR_EXPR:
4634 /* See what we are pointing at and look at its alignment. */
4635 exp = TREE_OPERAND (exp, 0);
4636 if (TREE_CODE (exp) == FUNCTION_DECL)
4637 align = MAX (align, FUNCTION_BOUNDARY);
4638 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4639 align = MAX (align, DECL_ALIGN (exp));
4640#ifdef CONSTANT_ALIGNMENT
4641 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
4642 align = CONSTANT_ALIGNMENT (exp, align);
4643#endif
4644 return MIN (align, max_align);
4645
4646 default:
4647 return align;
4648 }
4649 }
4650}
4651\f
4652/* Return the tree node and offset if a given argument corresponds to
4653 a string constant. */
4654
4655static tree
4656string_constant (arg, ptr_offset)
4657 tree arg;
4658 tree *ptr_offset;
4659{
4660 STRIP_NOPS (arg);
4661
4662 if (TREE_CODE (arg) == ADDR_EXPR
4663 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
4664 {
4665 *ptr_offset = integer_zero_node;
4666 return TREE_OPERAND (arg, 0);
4667 }
4668 else if (TREE_CODE (arg) == PLUS_EXPR)
4669 {
4670 tree arg0 = TREE_OPERAND (arg, 0);
4671 tree arg1 = TREE_OPERAND (arg, 1);
4672
4673 STRIP_NOPS (arg0);
4674 STRIP_NOPS (arg1);
4675
4676 if (TREE_CODE (arg0) == ADDR_EXPR
4677 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
4678 {
4679 *ptr_offset = arg1;
4680 return TREE_OPERAND (arg0, 0);
4681 }
4682 else if (TREE_CODE (arg1) == ADDR_EXPR
4683 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
4684 {
4685 *ptr_offset = arg0;
4686 return TREE_OPERAND (arg1, 0);
4687 }
4688 }
4689
4690 return 0;
4691}
4692
4693/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
4694 way, because it could contain a zero byte in the middle.
4695 TREE_STRING_LENGTH is the size of the character array, not the string.
4696
4697 Unfortunately, string_constant can't access the values of const char
4698    arrays with initializers, so neither can we here. */
4699
4700static tree
4701c_strlen (src)
4702 tree src;
4703{
4704 tree offset_node;
4705 int offset, max;
4706 char *ptr;
4707
4708 src = string_constant (src, &offset_node);
4709 if (src == 0)
4710 return 0;
4711 max = TREE_STRING_LENGTH (src);
4712 ptr = TREE_STRING_POINTER (src);
4713 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
4714 {
4715 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
4716 compute the offset to the following null if we don't know where to
4717 start searching for it. */
4718 int i;
4719 for (i = 0; i < max; i++)
4720 if (ptr[i] == 0)
4721 return 0;
4722 /* We don't know the starting offset, but we do know that the string
4723 has no internal zero bytes. We can assume that the offset falls
4724 within the bounds of the string; otherwise, the programmer deserves
4725 what he gets. Subtract the offset from the length of the string,
4726 and return that. */
4727 /* This would perhaps not be valid if we were dealing with named
4728 arrays in addition to literal string constants. */
4729 return size_binop (MINUS_EXPR, size_int (max), offset_node);
4730 }
4731
4732 /* We have a known offset into the string. Start searching there for
4733 a null character. */
4734 if (offset_node == 0)
4735 offset = 0;
4736 else
4737 {
4738 /* Did we get a long long offset? If so, punt. */
4739 if (TREE_INT_CST_HIGH (offset_node) != 0)
4740 return 0;
4741 offset = TREE_INT_CST_LOW (offset_node);
4742 }
4743 /* If the offset is known to be out of bounds, warn, and call strlen at
4744 runtime. */
4745 if (offset < 0 || offset > max)
4746 {
4747 warning ("offset outside bounds of constant string");
4748 return 0;
4749 }
4750 /* Use strlen to search for the first zero byte. Since any strings
4751 constructed with build_string will have nulls appended, we win even
4752 if we get handed something like (char[4])"abcd".
4753
4754 Since OFFSET is our starting index into the string, no further
4755 calculation is needed. */
4756 return size_int (strlen (ptr + offset));
4757}
4758\f
4759/* Expand an expression EXP that calls a built-in function,
4760 with result going to TARGET if that's convenient
4761 (and in mode MODE if that's convenient).
4762 SUBTARGET may be used as the target for computing one of EXP's operands.
4763 IGNORE is nonzero if the value is to be ignored. */
4764
4765static rtx
4766expand_builtin (exp, target, subtarget, mode, ignore)
4767 tree exp;
4768 rtx target;
4769 rtx subtarget;
4770 enum machine_mode mode;
4771 int ignore;
4772{
4773 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4774 tree arglist = TREE_OPERAND (exp, 1);
4775 rtx op0;
4776   rtx lab1, lab2, insns;
4777 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
4778
4779 switch (DECL_FUNCTION_CODE (fndecl))
4780 {
4781 case BUILT_IN_ABS:
4782 case BUILT_IN_LABS:
4783 case BUILT_IN_FABS:
4784 /* build_function_call changes these into ABS_EXPR. */
4785 abort ();
4786
4787 case BUILT_IN_FSQRT:
4788 /* If not optimizing, call the library function. */
4789       if (! optimize)
4790 break;
4791
4792 if (arglist == 0
4793 /* Arg could be non-integer if user redeclared this fcn wrong. */
4794 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4795 return const0_rtx;
4796
4797 /* Compute the argument. */
4798 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4799
4800 /* Make a suitable register to place result in. */
4801 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4802
4803 /* Test the argument to make sure it is in the proper domain for
4804 the sqrt function. If it is not in the domain, branch to a
4805 library call. */
4806 start_sequence ();
4807 lab1 = gen_label_rtx ();
4808 lab2 = gen_label_rtx ();
4809
4810 /* By default check the arguments. If flag_fast_math is turned on,
4811 then assume sqrt will always be called with valid arguments.
4812 Note changing the test below from "> 0" to ">= 0" would cause
4813 incorrect results when computing sqrt(-0.0). */
4814
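      /* How the single "op0 > 0" test below sorts the IEEE cases
	 (any comparison against NaN is false):

	     op0 NaN      ->  false  ->  library call
	     op0 <  0     ->  false  ->  library call
	     op0 == +-0.0 ->  false  ->  library call (keeps sqrt(-0.0) right)
	     op0 >  0     ->  true   ->  open-coded sqrt insn (includes +Inf)  */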
4815 if (! flag_fast_math)
4816 {
4817 	  /* By checking op > 0 we are able to catch all of the
4818 	     IEEE special cases with a single if conditional. */
4819 emit_cmp_insn (op0, CONST0_RTX (GET_MODE (op0)), GT, 0,
4820 GET_MODE (op0), 0, 0);
4821 emit_jump_insn (gen_bgt (lab1));
4822
4823 /* The argument was not in the domain; do this via library call.
4824 Pop the arguments right away in case the call gets deleted. */
4825 NO_DEFER_POP;
4826 	  expand_call (exp, target, 0);
4827 	  OK_DEFER_POP;
4828
4829 	  /* Branch around the open-coded version.  */
4830 emit_jump_insn (gen_jump (lab2));
4831 }
4832
4833 emit_label (lab1);
4834 /* Arg is in the domain, compute sqrt, into TARGET.
4835 Set TARGET to wherever the result comes back. */
4836 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4837 			 sqrt_optab, op0, target, 0);
4838
4839 /* If we were unable to expand via the builtin, stop the
4840 sequence (without outputting the insns) and break, causing
4841 	 a call to the library function. */
4842       if (target == 0)
4843 	{
4844 	  end_sequence ();
4845 break;
4846 }
4847 emit_label (lab2);
4848
4849
4850       /* Output the entire sequence. */
4851 insns = get_insns ();
4852 end_sequence ();
4853 emit_insns (insns);
4854
4855 return target;
4856
4857 case BUILT_IN_SAVEREGS:
4858 /* Don't do __builtin_saveregs more than once in a function.
4859 Save the result of the first call and reuse it. */
4860 if (saveregs_value != 0)
4861 return saveregs_value;
4862 {
4863 /* When this function is called, it means that registers must be
4864 saved on entry to this function. So we migrate the
4865 call to the first insn of this function. */
4866 rtx temp;
4867 rtx seq;
4868 rtx valreg, saved_valreg;
4869
4870 /* Now really call the function. `expand_call' does not call
4871 expand_builtin, so there is no danger of infinite recursion here. */
4872 start_sequence ();
4873
4874#ifdef EXPAND_BUILTIN_SAVEREGS
4875 /* Do whatever the machine needs done in this case. */
4876 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
4877#else
4878 /* The register where the function returns its value
4879 is likely to have something else in it, such as an argument.
4880 So preserve that register around the call. */
4881 if (value_mode != VOIDmode)
4882 {
4883 valreg = hard_libcall_value (value_mode);
4884 saved_valreg = gen_reg_rtx (value_mode);
4885 emit_move_insn (saved_valreg, valreg);
4886 }
4887
4888 /* Generate the call, putting the value in a pseudo. */
4889 temp = expand_call (exp, target, ignore);
4890
4891 if (value_mode != VOIDmode)
4892 emit_move_insn (valreg, saved_valreg);
4893#endif
4894
4895 seq = get_insns ();
4896 end_sequence ();
4897
4898 saveregs_value = temp;
4899
4900 /* This won't work inside a SEQUENCE--it really has to be
4901 at the start of the function. */
4902 if (in_sequence_p ())
4903 {
4904 /* Better to do this than to crash. */
4905 error ("`va_start' used within `({...})'");
4906 return temp;
4907 }
4908
4909 /* Put the sequence after the NOTE that starts the function. */
4910 emit_insns_before (seq, NEXT_INSN (get_insns ()));
4911 return temp;
4912 }
4913
4914 /* __builtin_args_info (N) returns word N of the arg space info
4915 for the current function. The number and meanings of words
4916    are controlled by the definition of CUMULATIVE_ARGS. */
4917 case BUILT_IN_ARGS_INFO:
4918 {
4919 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4920 int i;
4921 int *word_ptr = (int *) &current_function_args_info;
4922 tree type, elts, result;
4923
4924 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
4925 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
4926 __FILE__, __LINE__);
4927
4928 if (arglist != 0)
4929 {
4930 tree arg = TREE_VALUE (arglist);
4931 if (TREE_CODE (arg) != INTEGER_CST)
4932 error ("argument of __builtin_args_info must be constant");
4933 else
4934 {
4935 int wordnum = TREE_INT_CST_LOW (arg);
4936
4937 if (wordnum < 0 || wordnum >= nwords)
4938 error ("argument of __builtin_args_info out of range");
4939 else
4940 return gen_rtx (CONST_INT, VOIDmode, word_ptr[wordnum]);
4941 }
4942 }
4943 else
4944 error ("missing argument in __builtin_args_info");
4945
4946 return const0_rtx;
4947
4948#if 0
4949 for (i = 0; i < nwords; i++)
4950 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
4951
4952 type = build_array_type (integer_type_node,
4953 build_index_type (build_int_2 (nwords, 0)));
4954 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
4955 TREE_CONSTANT (result) = 1;
4956 TREE_STATIC (result) = 1;
4957 result = build (INDIRECT_REF, build_pointer_type (type), result);
4958 TREE_CONSTANT (result) = 1;
4959 return expand_expr (result, 0, VOIDmode, 0);
4960#endif
4961 }
4962
4963 /* Return the address of the first anonymous stack arg. */
4964 case BUILT_IN_NEXT_ARG:
4965 {
4966 tree fntype = TREE_TYPE (current_function_decl);
4967 if (!(TYPE_ARG_TYPES (fntype) != 0
4968 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4969 != void_type_node)))
4970 {
4971 error ("`va_start' used in function with fixed args");
4972 return const0_rtx;
4973 }
4974 }
4975
4976 return expand_binop (Pmode, add_optab,
4977 current_function_internal_arg_pointer,
4978 current_function_arg_offset_rtx,
4979 0, 0, OPTAB_LIB_WIDEN);
4980
4981 case BUILT_IN_CLASSIFY_TYPE:
4982 if (arglist != 0)
4983 {
4984 tree type = TREE_TYPE (TREE_VALUE (arglist));
4985 enum tree_code code = TREE_CODE (type);
4986 if (code == VOID_TYPE)
4987 return gen_rtx (CONST_INT, VOIDmode, void_type_class);
4988 if (code == INTEGER_TYPE)
4989 return gen_rtx (CONST_INT, VOIDmode, integer_type_class);
4990 if (code == CHAR_TYPE)
4991 return gen_rtx (CONST_INT, VOIDmode, char_type_class);
4992 if (code == ENUMERAL_TYPE)
4993 return gen_rtx (CONST_INT, VOIDmode, enumeral_type_class);
4994 if (code == BOOLEAN_TYPE)
4995 return gen_rtx (CONST_INT, VOIDmode, boolean_type_class);
4996 if (code == POINTER_TYPE)
4997 return gen_rtx (CONST_INT, VOIDmode, pointer_type_class);
4998 if (code == REFERENCE_TYPE)
4999 return gen_rtx (CONST_INT, VOIDmode, reference_type_class);
5000 if (code == OFFSET_TYPE)
5001 return gen_rtx (CONST_INT, VOIDmode, offset_type_class);
5002 if (code == REAL_TYPE)
5003 return gen_rtx (CONST_INT, VOIDmode, real_type_class);
5004 if (code == COMPLEX_TYPE)
5005 return gen_rtx (CONST_INT, VOIDmode, complex_type_class);
5006 if (code == FUNCTION_TYPE)
5007 return gen_rtx (CONST_INT, VOIDmode, function_type_class);
5008 if (code == METHOD_TYPE)
5009 return gen_rtx (CONST_INT, VOIDmode, method_type_class);
5010 if (code == RECORD_TYPE)
5011 return gen_rtx (CONST_INT, VOIDmode, record_type_class);
5012 if (code == UNION_TYPE)
5013 return gen_rtx (CONST_INT, VOIDmode, union_type_class);
5014 if (code == ARRAY_TYPE)
5015 return gen_rtx (CONST_INT, VOIDmode, array_type_class);
5016 if (code == STRING_TYPE)
5017 return gen_rtx (CONST_INT, VOIDmode, string_type_class);
5018 if (code == SET_TYPE)
5019 return gen_rtx (CONST_INT, VOIDmode, set_type_class);
5020 if (code == FILE_TYPE)
5021 return gen_rtx (CONST_INT, VOIDmode, file_type_class);
5022 if (code == LANG_TYPE)
5023 return gen_rtx (CONST_INT, VOIDmode, lang_type_class);
5024 }
5025 return gen_rtx (CONST_INT, VOIDmode, no_type_class);
5026
5027 case BUILT_IN_CONSTANT_P:
5028 if (arglist == 0)
5029 return const0_rtx;
5030 else
5031       return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5032 ? const1_rtx : const0_rtx);
5033
5034 case BUILT_IN_FRAME_ADDRESS:
5035 /* The argument must be a nonnegative integer constant.
5036 It counts the number of frames to scan up the stack.
5037 The value is the address of that frame. */
5038 case BUILT_IN_RETURN_ADDRESS:
5039 /* The argument must be a nonnegative integer constant.
5040 It counts the number of frames to scan up the stack.
5041 The value is the return address saved in that frame. */
5042 if (arglist == 0)
5043 /* Warning about missing arg was already issued. */
5044 return const0_rtx;
5045 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5046 {
5047 error ("invalid arg to __builtin_return_address");
5048 return const0_rtx;
5049 }
5050 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5051 {
5052 error ("invalid arg to __builtin_return_address");
5053 return const0_rtx;
5054 }
5055 else
5056 {
5057 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5058 rtx tem = frame_pointer_rtx;
5059 int i;
5060
5061 /* Scan back COUNT frames to the specified frame. */
5062 for (i = 0; i < count; i++)
5063 {
5064 /* Assume the dynamic chain pointer is in the word that
5065 the frame address points to, unless otherwise specified. */
5066#ifdef DYNAMIC_CHAIN_ADDRESS
5067 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5068#endif
5069 tem = memory_address (Pmode, tem);
5070 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5071 }
5072
5073 /* For __builtin_frame_address, return what we've got. */
5074 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5075 return tem;
5076
5077 /* For __builtin_return_address,
5078 	 get the return address from that frame. */
5079#ifdef RETURN_ADDR_RTX
5080 return RETURN_ADDR_RTX (count, tem);
5081#else
5082 tem = memory_address (Pmode,
5083 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5084 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5085#endif
5086 }
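      /* Usage sketch:

	     void *pc0 = __builtin_return_address (0);  -- this frame's
	     void *pc1 = __builtin_return_address (1);  -- the caller's

	 Each increment of the argument follows one more link of the
	 dynamic chain before the saved address is fetched.  */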
5087
5088 case BUILT_IN_ALLOCA:
5089 if (arglist == 0
5090 /* Arg could be non-integer if user redeclared this fcn wrong. */
5091 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5092 return const0_rtx;
5093 current_function_calls_alloca = 1;
5094 /* Compute the argument. */
5095 op0 = expand_expr (TREE_VALUE (arglist), 0, VOIDmode, 0);
5096
5097 /* Allocate the desired space. */
5098       target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5099
5100 /* Record the new stack level for nonlocal gotos. */
5101 if (nonlocal_goto_stack_level != 0)
5102 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
5103 return target;
5104
5105 case BUILT_IN_FFS:
5106 /* If not optimizing, call the library function. */
5107 if (!optimize)
5108 break;
5109
5110 if (arglist == 0
5111 /* Arg could be non-integer if user redeclared this fcn wrong. */
5112 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5113 return const0_rtx;
5114
5115 /* Compute the argument. */
5116 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5117 /* Compute ffs, into TARGET if possible.
5118 Set TARGET to wherever the result comes back. */
5119 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5120 ffs_optab, op0, target, 1);
5121 if (target == 0)
5122 abort ();
5123 return target;
5124
5125 case BUILT_IN_STRLEN:
5126 /* If not optimizing, call the library function. */
5127 if (!optimize)
5128 break;
5129
5130 if (arglist == 0
5131 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5132 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5133 return const0_rtx;
5134 else
5135 {
5136 tree src = TREE_VALUE (arglist);
5137 tree len = c_strlen (src);
5138
5139 int align
5140 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5141
5142 rtx result, src_rtx, char_rtx;
5143 enum machine_mode insn_mode = value_mode, char_mode;
5144 enum insn_code icode;
5145
5146 /* If the length is known, just return it. */
5147 if (len != 0)
5148 return expand_expr (len, target, mode, 0);
5149
5150 /* If SRC is not a pointer type, don't do this operation inline. */
5151 if (align == 0)
5152 break;
5153
5154 /* Call a function if we can't compute strlen in the right mode. */
5155
5156 while (insn_mode != VOIDmode)
5157 {
5158 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5159 if (icode != CODE_FOR_nothing)
5160 break;
5161
5162 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5163 }
5164 if (insn_mode == VOIDmode)
 5165	    break;
5166
5167 /* Make a place to write the result of the instruction. */
5168 result = target;
5169 if (! (result != 0
5170 && GET_CODE (result) == REG
5171 && GET_MODE (result) == insn_mode
5172 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5173 result = gen_reg_rtx (insn_mode);
5174
 5175	  /* Make sure the operands are acceptable to the predicates.  */
5176
5177 if (! (*insn_operand_predicate[icode][0]) (result, insn_mode))
5178 result = gen_reg_rtx (insn_mode);
5179
5180 src_rtx = memory_address (BLKmode,
5181 expand_expr (src, 0, Pmode,
5182 EXPAND_NORMAL));
5183 if (! (*insn_operand_predicate[icode][1]) (src_rtx, Pmode))
5184 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5185
5186 char_rtx = const0_rtx;
5187 char_mode = insn_operand_mode[icode][2];
5188 if (! (*insn_operand_predicate[icode][2]) (char_rtx, char_mode))
5189 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5190
5191 emit_insn (GEN_FCN (icode) (result,
5192 gen_rtx (MEM, BLKmode, src_rtx),
5193 char_rtx,
5194 gen_rtx (CONST_INT, VOIDmode, align)));
5195
5196 /* Return the value in the proper mode for this function. */
5197 if (GET_MODE (result) == value_mode)
5198 return result;
5199 else if (target != 0)
5200 {
5201 convert_move (target, result, 0);
5202 return target;
5203 }
5204 else
5205 return convert_to_mode (value_mode, result, 0);
5206 }
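      /* For instance, `strlen ("hello")' never reaches the insn machinery:
	 c_strlen folds it to the constant 5, which is returned above
	 through expand_expr.  */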
5207
5208 case BUILT_IN_STRCPY:
5209 /* If not optimizing, call the library function. */
5210 if (!optimize)
5211 break;
5212
5213 if (arglist == 0
5214 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5215 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5216 || TREE_CHAIN (arglist) == 0
5217 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5218 return const0_rtx;
5219 else
5220 {
5221 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5222
5223 if (len == 0)
5224 break;
5225
5226 len = size_binop (PLUS_EXPR, len, integer_one_node);
5227
5228 chainon (arglist, build_tree_list (0, len));
5229 }
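      /* For instance, `strcpy (d, "abc")' is handled as if it were
	 `memcpy (d, "abc", 4)': the constant length 3 + 1 is chained
	 onto ARGLIST here and control drops into the memcpy case.  */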
5230
 5231    /* Falls through.  */
5232 case BUILT_IN_MEMCPY:
5233 /* If not optimizing, call the library function. */
5234 if (!optimize)
5235 break;
5236
5237 if (arglist == 0
5238 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5239 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5240 || TREE_CHAIN (arglist) == 0
5241 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5242 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5243 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5244 return const0_rtx;
5245 else
5246 {
5247 tree dest = TREE_VALUE (arglist);
5248 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5249 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5250
5251 int src_align
5252 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5253 int dest_align
5254 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5255 rtx dest_rtx;
5256
5257 /* If either SRC or DEST is not a pointer type, don't do
5258 this operation in-line. */
5259 if (src_align == 0 || dest_align == 0)
5260 {
5261 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5262 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5263 break;
5264 }
5265
5266 dest_rtx = expand_expr (dest, 0, Pmode, EXPAND_NORMAL);
5267
5268 /* Copy word part most expediently. */
5269 emit_block_move (gen_rtx (MEM, BLKmode,
5270 memory_address (BLKmode, dest_rtx)),
5271 gen_rtx (MEM, BLKmode,
5272 memory_address (BLKmode,
5273 expand_expr (src, 0, Pmode,
5274 EXPAND_NORMAL))),
5275 expand_expr (len, 0, VOIDmode, 0),
5276 MIN (src_align, dest_align));
5277 return dest_rtx;
5278 }
5279
5280/* These comparison functions need an instruction that returns an actual
5281 index. An ordinary compare that just sets the condition codes
5282 is not enough. */
5283#ifdef HAVE_cmpstrsi
5284 case BUILT_IN_STRCMP:
5285 /* If not optimizing, call the library function. */
5286 if (!optimize)
5287 break;
5288
5289 if (arglist == 0
5290 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5291 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5292 || TREE_CHAIN (arglist) == 0
5293 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5294 return const0_rtx;
5295 else if (!HAVE_cmpstrsi)
5296 break;
5297 {
5298 tree arg1 = TREE_VALUE (arglist);
5299 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5300 tree offset;
5301 tree len, len2;
5302
5303 len = c_strlen (arg1);
5304 if (len)
5305 len = size_binop (PLUS_EXPR, integer_one_node, len);
5306 len2 = c_strlen (arg2);
5307 if (len2)
5308 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5309
5310 /* If we don't have a constant length for the first, use the length
5311 of the second, if we know it. We don't require a constant for
5312 this case; some cost analysis could be done if both are available
5313 but neither is constant. For now, assume they're equally cheap.
5314
5315 If both strings have constant lengths, use the smaller. This
 5316	 could arise if optimization results in strcmp being called with
5317 two fixed strings, or if the code was machine-generated. We should
5318 add some code to the `memcmp' handler below to deal with such
5319 situations, someday. */
5320 if (!len || TREE_CODE (len) != INTEGER_CST)
5321 {
5322 if (len2)
5323 len = len2;
5324 else if (len == 0)
5325 break;
5326 }
5327 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5328 {
5329 if (tree_int_cst_lt (len2, len))
5330 len = len2;
5331 }
5332
5333 chainon (arglist, build_tree_list (0, len));
5334 }
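      /* For instance, in `strcmp (s, "hi")' only the second length is
	 known, so the comparison length chained on here is 3: the two
	 characters of "hi" plus the terminating null.  */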
5335
 5336    /* Falls through.  */
5337 case BUILT_IN_MEMCMP:
5338 /* If not optimizing, call the library function. */
5339 if (!optimize)
5340 break;
5341
5342 if (arglist == 0
5343 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5344 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5345 || TREE_CHAIN (arglist) == 0
5346 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5347 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5348 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5349 return const0_rtx;
5350 else if (!HAVE_cmpstrsi)
5351 break;
5352 {
5353 tree arg1 = TREE_VALUE (arglist);
5354 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5355 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5356 rtx result;
5357
5358 int arg1_align
5359 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5360 int arg2_align
5361 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5362 enum machine_mode insn_mode
5363 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
5364
5365 /* If we don't have POINTER_TYPE, call the function. */
5366 if (arg1_align == 0 || arg2_align == 0)
5367 {
5368 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
5369 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5370 break;
5371 }
5372
5373 /* Make a place to write the result of the instruction. */
5374 result = target;
5375 if (! (result != 0
5376 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
5377 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5378 result = gen_reg_rtx (insn_mode);
5379
5380 emit_insn (gen_cmpstrsi (result,
5381 gen_rtx (MEM, BLKmode,
5382 expand_expr (arg1, 0, Pmode, EXPAND_NORMAL)),
5383 gen_rtx (MEM, BLKmode,
5384 expand_expr (arg2, 0, Pmode, EXPAND_NORMAL)),
5385 expand_expr (len, 0, VOIDmode, 0),
5386 gen_rtx (CONST_INT, VOIDmode,
5387 MIN (arg1_align, arg2_align))));
5388
5389 /* Return the value in the proper mode for this function. */
5390 mode = TYPE_MODE (TREE_TYPE (exp));
5391 if (GET_MODE (result) == mode)
5392 return result;
5393 else if (target != 0)
5394 {
5395 convert_move (target, result, 0);
5396 return target;
5397 }
5398 else
5399 return convert_to_mode (mode, result, 0);
5400 }
5401#else
5402 case BUILT_IN_STRCMP:
5403 case BUILT_IN_MEMCMP:
5404 break;
5405#endif
5406
5407 default: /* just do library call, if unknown builtin */
5408 error ("built-in function %s not currently supported",
5409 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5410 }
5411
5412 /* The switch statement above can drop through to cause the function
5413 to be called normally. */
5414
5415 return expand_call (exp, target, ignore);
5416}
5417\f
5418/* Expand code for a post- or pre- increment or decrement
5419 and return the RTX for the result.
5420 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
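/* For example, `i++' arrives here with POST == 1 and yields I's old
   value, while `++i' arrives with POST == 0 and yields the incremented
   lvalue itself.  */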
5421
5422static rtx
5423expand_increment (exp, post)
5424 register tree exp;
5425 int post;
5426{
5427 register rtx op0, op1;
5428 register rtx temp, value;
5429 register tree incremented = TREE_OPERAND (exp, 0);
5430 optab this_optab = add_optab;
5431 int icode;
5432 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5433 int op0_is_copy = 0;
5434
5435 /* Stabilize any component ref that might need to be
5436 evaluated more than once below. */
5437 if (TREE_CODE (incremented) == BIT_FIELD_REF
5438 || (TREE_CODE (incremented) == COMPONENT_REF
5439 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
5440 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
5441 incremented = stabilize_reference (incremented);
5442
5443 /* Compute the operands as RTX.
5444 Note whether OP0 is the actual lvalue or a copy of it:
5445 I believe it is a copy iff it is a register and insns were
5446 generated in computing it or if it is a SUBREG (generated when
5447 the low-order field in a register was referenced). */
5448 temp = get_last_insn ();
5449 op0 = expand_expr (incremented, 0, VOIDmode, 0);
5450 op0_is_copy = (GET_CODE (op0) == SUBREG
5451 || (GET_CODE (op0) == REG && temp != get_last_insn ()));
5452 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5453
5454 /* Decide whether incrementing or decrementing. */
5455 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
5456 || TREE_CODE (exp) == PREDECREMENT_EXPR)
5457 this_optab = sub_optab;
5458
5459 /* If OP0 is not the actual lvalue, but rather a copy in a register,
5460 then we cannot just increment OP0. We must
5461 therefore contrive to increment the original value.
5462 Then we can return OP0 since it is a copy of the old value. */
5463 if (op0_is_copy)
5464 {
5465 /* This is the easiest way to increment the value wherever it is.
5466 Problems with multiple evaluation of INCREMENTED
5467 are prevented because either (1) it is a component_ref,
5468 in which case it was stabilized above, or (2) it is an array_ref
5469 with constant index in an array in a register, which is
5470 safe to reevaluate. */
5471 tree newexp = build ((this_optab == add_optab
5472 ? PLUS_EXPR : MINUS_EXPR),
5473 TREE_TYPE (exp),
5474 incremented,
5475 TREE_OPERAND (exp, 1));
5476 temp = expand_assignment (incremented, newexp, ! post, 0);
5477 return post ? op0 : temp;
5478 }
5479
5480 /* Convert decrement by a constant into a negative increment. */
5481 if (this_optab == sub_optab
5482 && GET_CODE (op1) == CONST_INT)
5483 {
5484 op1 = gen_rtx (CONST_INT, VOIDmode, - INTVAL (op1));
5485 this_optab = add_optab;
5486 }
5487
5488 if (post)
5489 {
5490 /* We have a true reference to the value in OP0.
5491 If there is an insn to add or subtract in this mode, queue it. */
5492
5493#if 0 /* Turned off to avoid making extra insn for indexed memref. */
5494 op0 = stabilize (op0);
5495#endif
5496
5497 icode = (int) this_optab->handlers[(int) mode].insn_code;
5498 if (icode != (int) CODE_FOR_nothing
5499 /* Make sure that OP0 is valid for operands 0 and 1
5500 of the insn we want to queue. */
5501 && (*insn_operand_predicate[icode][0]) (op0, mode)
5502 && (*insn_operand_predicate[icode][1]) (op0, mode))
5503 {
5504 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
5505 op1 = force_reg (mode, op1);
5506
5507 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
5508 }
5509 }
5510
5511 /* Preincrement, or we can't increment with one simple insn. */
5512 if (post)
5513 /* Save a copy of the value before inc or dec, to return it later. */
5514 temp = value = copy_to_reg (op0);
5515 else
5516 /* Arrange to return the incremented value. */
5517 /* Copy the rtx because expand_binop will protect from the queue,
5518 and the results of that would be invalid for us to return
5519 if our caller does emit_queue before using our result. */
5520 temp = copy_rtx (value = op0);
5521
5522 /* Increment however we can. */
5523 op1 = expand_binop (mode, this_optab, value, op1, op0,
5524 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
5525 /* Make sure the value is stored into OP0. */
5526 if (op1 != op0)
5527 emit_move_insn (op0, op1);
5528
5529 return temp;
5530}
5531\f
5532/* Expand all function calls contained within EXP, innermost ones first.
5533 But don't look within expressions that have sequence points.
5534 For each CALL_EXPR, record the rtx for its value
5535 in the CALL_EXPR_RTL field. */
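/* For example, in `a + f (b)' the call `f (b)' is expanded here first;
   when expand_expr later reaches that CALL_EXPR it reuses the recorded
   rtx rather than expanding the call in the middle of the addition.  */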
5536
5537static void
5538preexpand_calls (exp)
5539 tree exp;
5540{
5541 register int nops, i;
5542 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5543
5544 if (! do_preexpand_calls)
5545 return;
5546
5547 /* Only expressions and references can contain calls. */
5548
5549 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
5550 return;
5551
5552 switch (TREE_CODE (exp))
5553 {
5554 case CALL_EXPR:
5555 /* Do nothing if already expanded. */
5556 if (CALL_EXPR_RTL (exp) != 0)
5557 return;
5558
5559 /* Do nothing to built-in functions. */
5560 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
5561 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
5562 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
 5563	CALL_EXPR_RTL (exp) = expand_call (exp, 0, 0);
5564 return;
5565
5566 case COMPOUND_EXPR:
5567 case COND_EXPR:
5568 case TRUTH_ANDIF_EXPR:
5569 case TRUTH_ORIF_EXPR:
5570 /* If we find one of these, then we can be sure
5571 the adjust will be done for it (since it makes jumps).
5572 Do it now, so that if this is inside an argument
5573 of a function, we don't get the stack adjustment
5574 after some other args have already been pushed. */
5575 do_pending_stack_adjust ();
5576 return;
5577
5578 case BLOCK:
5579 case RTL_EXPR:
5580 case WITH_CLEANUP_EXPR:
5581 return;
5582
5583 case SAVE_EXPR:
5584 if (SAVE_EXPR_RTL (exp) != 0)
5585 return;
5586 }
5587
5588 nops = tree_code_length[(int) TREE_CODE (exp)];
5589 for (i = 0; i < nops; i++)
5590 if (TREE_OPERAND (exp, i) != 0)
5591 {
5592 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
5593 if (type == 'e' || type == '<' || type == '1' || type == '2'
5594 || type == 'r')
5595 preexpand_calls (TREE_OPERAND (exp, i));
5596 }
5597}
5598\f
5599/* At the start of a function, record that we have no previously-pushed
5600 arguments waiting to be popped. */
5601
5602void
5603init_pending_stack_adjust ()
5604{
5605 pending_stack_adjust = 0;
5606}
5607
5608/* When exiting from function, if safe, clear out any pending stack adjust
5609 so the adjustment won't get done. */
5610
5611void
5612clear_pending_stack_adjust ()
5613{
5614#ifdef EXIT_IGNORE_STACK
5615 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
5616 && ! (TREE_INLINE (current_function_decl) && ! flag_no_inline)
5617 && ! flag_inline_functions)
5618 pending_stack_adjust = 0;
5619#endif
5620}
5621
5622/* Pop any previously-pushed arguments that have not been popped yet. */
5623
5624void
5625do_pending_stack_adjust ()
5626{
5627 if (inhibit_defer_pop == 0)
5628 {
5629 if (pending_stack_adjust != 0)
5630 adjust_stack (gen_rtx (CONST_INT, VOIDmode, pending_stack_adjust));
5631 pending_stack_adjust = 0;
5632 }
5633}
5634
5635/* Expand all cleanups up to OLD_CLEANUPS.
5636 Needed here, and also for language-dependent calls. */
5637
5638void
5639expand_cleanups_to (old_cleanups)
5640 tree old_cleanups;
5641{
5642 while (cleanups_this_call != old_cleanups)
5643 {
5644 expand_expr (TREE_VALUE (cleanups_this_call), 0, VOIDmode, 0);
5645 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
5646 }
5647}
5648\f
5649/* Expand conditional expressions. */
5650
5651/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
5652 LABEL is an rtx of code CODE_LABEL, in this function and all the
5653 functions here. */
5654
5655void
5656jumpifnot (exp, label)
5657 tree exp;
5658 rtx label;
5659{
5660 do_jump (exp, label, 0);
5661}
5662
5663/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
5664
5665void
5666jumpif (exp, label)
5667 tree exp;
5668 rtx label;
5669{
5670 do_jump (exp, 0, label);
5671}
5672
5673/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
5674 the result is zero, or IF_TRUE_LABEL if the result is one.
5675 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
5676 meaning fall through in that case.
5677
5678 do_jump always does any pending stack adjust except when it does not
5679 actually perform a jump. An example where there is no jump
5680 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
5681
5682 This function is responsible for optimizing cases such as
5683 &&, || and comparison operators in EXP. */
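/* For example, `if (a && b)' arrives here as a TRUTH_ANDIF_EXPR:
   A is tested with a jump to the false label only, falling through
   on success, and then B is tested with jumps to both labels, so
   neither subexpression is ever forced to produce a value.  */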
5684
5685void
5686do_jump (exp, if_false_label, if_true_label)
5687 tree exp;
5688 rtx if_false_label, if_true_label;
5689{
5690 register enum tree_code code = TREE_CODE (exp);
5691 /* Some cases need to create a label to jump to
5692 in order to properly fall through.
5693 These cases set DROP_THROUGH_LABEL nonzero. */
5694 rtx drop_through_label = 0;
5695 rtx temp;
5696 rtx comparison = 0;
5697 int i;
5698 tree type;
5699
5700 emit_queue ();
5701
5702 switch (code)
5703 {
5704 case ERROR_MARK:
5705 break;
5706
5707 case INTEGER_CST:
5708 temp = integer_zerop (exp) ? if_false_label : if_true_label;
5709 if (temp)
5710 emit_jump (temp);
5711 break;
5712
5713#if 0
5714 /* This is not true with #pragma weak */
5715 case ADDR_EXPR:
5716 /* The address of something can never be zero. */
5717 if (if_true_label)
5718 emit_jump (if_true_label);
5719 break;
5720#endif
5721
5722 case NOP_EXPR:
5723 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
5724 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
5725 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
5726 goto normal;
5727 case CONVERT_EXPR:
5728 /* If we are narrowing the operand, we have to do the compare in the
5729 narrower mode. */
5730 if ((TYPE_PRECISION (TREE_TYPE (exp))
5731 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5732 goto normal;
5733 case NON_LVALUE_EXPR:
5734 case REFERENCE_EXPR:
5735 case ABS_EXPR:
5736 case NEGATE_EXPR:
5737 case LROTATE_EXPR:
5738 case RROTATE_EXPR:
5739 /* These cannot change zero->non-zero or vice versa. */
5740 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5741 break;
5742
5743#if 0
5744 /* This is never less insns than evaluating the PLUS_EXPR followed by
5745 a test and can be longer if the test is eliminated. */
5746 case PLUS_EXPR:
5747 /* Reduce to minus. */
5748 exp = build (MINUS_EXPR, TREE_TYPE (exp),
5749 TREE_OPERAND (exp, 0),
5750 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
5751 TREE_OPERAND (exp, 1))));
5752 /* Process as MINUS. */
5753#endif
5754
5755 case MINUS_EXPR:
5756 /* Non-zero iff operands of minus differ. */
5757 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
5758 TREE_OPERAND (exp, 0),
5759 TREE_OPERAND (exp, 1)),
5760 NE, NE);
5761 break;
5762
5763 case BIT_AND_EXPR:
5764 /* If we are AND'ing with a small constant, do this comparison in the
5765 smallest type that fits. If the machine doesn't have comparisons
5766 that small, it will be converted back to the wider comparison.
5767 This helps if we are testing the sign bit of a narrower object.
5768 combine can't do this for us because it can't know whether a
5769 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
5770
5771 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5772 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_INT
5773 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
5774 && (type = type_for_size (i + 1, 1)) != 0
5775 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5776 {
5777 do_jump (convert (type, exp), if_false_label, if_true_label);
5778 break;
5779 }
5780 goto normal;
5781
5782 case TRUTH_NOT_EXPR:
5783 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5784 break;
5785
5786 case TRUTH_ANDIF_EXPR:
5787 if (if_false_label == 0)
5788 if_false_label = drop_through_label = gen_label_rtx ();
5789 do_jump (TREE_OPERAND (exp, 0), if_false_label, 0);
5790 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5791 break;
5792
5793 case TRUTH_ORIF_EXPR:
5794 if (if_true_label == 0)
5795 if_true_label = drop_through_label = gen_label_rtx ();
5796 do_jump (TREE_OPERAND (exp, 0), 0, if_true_label);
5797 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5798 break;
5799
5800 case COMPOUND_EXPR:
5801 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5802 free_temp_slots ();
5803 emit_queue ();
 5804      do_pending_stack_adjust ();
5805 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
5806 break;
5807
5808 case COMPONENT_REF:
5809 case BIT_FIELD_REF:
5810 case ARRAY_REF:
5811 {
5812 int bitsize, bitpos, unsignedp;
5813 enum machine_mode mode;
5814 tree type;
5815 int volatilep = 0;
5816
5817 /* Get description of this reference. We don't actually care
5818 about the underlying object here. */
5819 get_inner_reference (exp, &bitsize, &bitpos, &mode, &unsignedp,
5820 &volatilep);
5821
5822 type = type_for_size (bitsize, unsignedp);
 5823	if (type != 0 && bitsize >= 0
5824 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp)))
5825 {
5826 do_jump (convert (type, exp), if_false_label, if_true_label);
5827 break;
5828 }
5829 goto normal;
5830 }
5831
5832 case COND_EXPR:
5833 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
5834 if (integer_onep (TREE_OPERAND (exp, 1))
5835 && integer_zerop (TREE_OPERAND (exp, 2)))
5836 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5837
5838 else if (integer_zerop (TREE_OPERAND (exp, 1))
5839 && integer_onep (TREE_OPERAND (exp, 2)))
5840 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5841
5842 else
5843 {
5844 register rtx label1 = gen_label_rtx ();
5845 drop_through_label = gen_label_rtx ();
5846 do_jump (TREE_OPERAND (exp, 0), label1, 0);
5847 /* Now the THEN-expression. */
5848 do_jump (TREE_OPERAND (exp, 1),
5849 if_false_label ? if_false_label : drop_through_label,
5850 if_true_label ? if_true_label : drop_through_label);
5851 /* In case the do_jump just above never jumps. */
5852 do_pending_stack_adjust ();
5853 emit_label (label1);
5854 /* Now the ELSE-expression. */
5855 do_jump (TREE_OPERAND (exp, 2),
5856 if_false_label ? if_false_label : drop_through_label,
5857 if_true_label ? if_true_label : drop_through_label);
5858 }
5859 break;
5860
5861 case EQ_EXPR:
5862 if (integer_zerop (TREE_OPERAND (exp, 1)))
5863 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
5864 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5865 == MODE_INT)
5866 &&
5867 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5868 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
5869 else
5870 comparison = compare (exp, EQ, EQ);
5871 break;
5872
5873 case NE_EXPR:
5874 if (integer_zerop (TREE_OPERAND (exp, 1)))
5875 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
5876 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5877 == MODE_INT)
5878 &&
5879 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5880 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
5881 else
5882 comparison = compare (exp, NE, NE);
5883 break;
5884
5885 case LT_EXPR:
5886 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5887 == MODE_INT)
5888 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5889 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
5890 else
5891 comparison = compare (exp, LT, LTU);
5892 break;
5893
5894 case LE_EXPR:
5895 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5896 == MODE_INT)
5897 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5898 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
5899 else
5900 comparison = compare (exp, LE, LEU);
5901 break;
5902
5903 case GT_EXPR:
5904 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5905 == MODE_INT)
5906 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5907 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
5908 else
5909 comparison = compare (exp, GT, GTU);
5910 break;
5911
5912 case GE_EXPR:
5913 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5914 == MODE_INT)
5915 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5916 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
5917 else
5918 comparison = compare (exp, GE, GEU);
5919 break;
5920
5921 default:
5922 normal:
5923 temp = expand_expr (exp, 0, VOIDmode, 0);
5924#if 0
5925 /* This is not needed any more and causes poor code since it causes
5926 comparisons and tests from non-SI objects to have different code
5927 sequences. */
5928 /* Copy to register to avoid generating bad insns by cse
5929 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
5930 if (!cse_not_expected && GET_CODE (temp) == MEM)
5931 temp = copy_to_reg (temp);
5932#endif
5933 do_pending_stack_adjust ();
5934 if (GET_CODE (temp) == CONST_INT)
5935 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
5936 else if (GET_CODE (temp) == LABEL_REF)
5937 comparison = const_true_rtx;
5938 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5939 && !can_compare_p (GET_MODE (temp)))
5940 /* Note swapping the labels gives us not-equal. */
5941 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
5942 else if (GET_MODE (temp) != VOIDmode)
5943 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
5944 NE, 1, GET_MODE (temp), 0, 0);
5945 else
5946 abort ();
5947 }
5948
5949 /* Do any postincrements in the expression that was tested. */
5950 emit_queue ();
5951
5952 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
5953 straight into a conditional jump instruction as the jump condition.
5954 Otherwise, all the work has been done already. */
5955
5956 if (comparison == const_true_rtx)
5957 {
5958 if (if_true_label)
5959 emit_jump (if_true_label);
5960 }
5961 else if (comparison == const0_rtx)
5962 {
5963 if (if_false_label)
5964 emit_jump (if_false_label);
5965 }
5966 else if (comparison)
5967 do_jump_for_compare (comparison, if_false_label, if_true_label);
5968
5969 free_temp_slots ();
5970
5971 if (drop_through_label)
5972 {
5973 /* If do_jump produces code that might be jumped around,
5974 do any stack adjusts from that code, before the place
5975 where control merges in. */
5976 do_pending_stack_adjust ();
5977 emit_label (drop_through_label);
5978 }
5979}
5980\f
5981/* Given a comparison expression EXP for values too wide to be compared
5982 with one insn, test the comparison and jump to the appropriate label.
5983 The code of EXP is ignored; we always test GT if SWAP is 0,
5984 and LT if SWAP is 1. */
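/* Illustratively, a signed double-word `a > b' comes out as
       if (a.high >  b.high) goto if_true;
       if (a.high != b.high) goto if_false;
       if (a.low  >  b.low)  goto if_true;   (unsigned compare)
       goto if_false;
   with all but the high-order word compared unsigned.  */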
5985
5986static void
5987do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
5988 tree exp;
5989 int swap;
5990 rtx if_false_label, if_true_label;
5991{
5992 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), 0, VOIDmode, 0);
5993 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), 0, VOIDmode, 0);
5994 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5995 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
5996 rtx drop_through_label = 0;
5997 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
5998 int i;
5999
6000 if (! if_true_label || ! if_false_label)
6001 drop_through_label = gen_label_rtx ();
6002 if (! if_true_label)
6003 if_true_label = drop_through_label;
6004 if (! if_false_label)
6005 if_false_label = drop_through_label;
6006
6007 /* Compare a word at a time, high order first. */
6008 for (i = 0; i < nwords; i++)
6009 {
6010 rtx comp;
6011 rtx op0_word, op1_word;
6012
6013 if (WORDS_BIG_ENDIAN)
6014 {
6015 op0_word = operand_subword_force (op0, i, mode);
6016 op1_word = operand_subword_force (op1, i, mode);
6017 }
6018 else
6019 {
6020 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6021 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6022 }
6023
6024 /* All but high-order word must be compared as unsigned. */
6025 comp = compare_from_rtx (op0_word, op1_word,
6026 (unsignedp || i > 0) ? GTU : GT,
6027 unsignedp, word_mode, 0, 0);
6028 if (comp == const_true_rtx)
6029 emit_jump (if_true_label);
6030 else if (comp != const0_rtx)
6031 do_jump_for_compare (comp, 0, if_true_label);
6032
6033 /* Consider lower words only if these are equal. */
6034 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6035 0, 0);
6036 if (comp == const_true_rtx)
6037 emit_jump (if_false_label);
6038 else if (comp != const0_rtx)
6039 do_jump_for_compare (comp, 0, if_false_label);
6040 }
6041
6042 if (if_false_label)
6043 emit_jump (if_false_label);
6044 if (drop_through_label)
6045 emit_label (drop_through_label);
6046}
6047
6048/* Given an EQ_EXPR expression EXP for values too wide to be compared
6049 with one insn, test the comparison and jump to the appropriate label. */
6050
6051static void
6052do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6053 tree exp;
6054 rtx if_false_label, if_true_label;
6055{
6056 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6057 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6058 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6059 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6060 int i;
6061 rtx drop_through_label = 0;
6062
6063 if (! if_false_label)
6064 drop_through_label = if_false_label = gen_label_rtx ();
6065
6066 for (i = 0; i < nwords; i++)
6067 {
6068 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6069 operand_subword_force (op1, i, mode),
6070 EQ, 0, word_mode, 0, 0);
6071 if (comp == const_true_rtx)
6072 emit_jump (if_false_label);
6073 else if (comp != const0_rtx)
6074 do_jump_for_compare (comp, if_false_label, 0);
6075 }
6076
6077 if (if_true_label)
6078 emit_jump (if_true_label);
6079 if (drop_through_label)
6080 emit_label (drop_through_label);
6081}
6082\f
6083/* Jump according to whether OP0 is 0.
6084 We assume that OP0 has an integer mode that is too wide
6085 for the available compare insns. */
6086
6087static void
6088do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6089 rtx op0;
6090 rtx if_false_label, if_true_label;
6091{
6092 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6093 int i;
6094 rtx drop_through_label = 0;
6095
6096 if (! if_false_label)
6097 drop_through_label = if_false_label = gen_label_rtx ();
6098
6099 for (i = 0; i < nwords; i++)
6100 {
6101 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6102 GET_MODE (op0)),
6103 const0_rtx, EQ, 0, word_mode, 0, 0);
6104 if (comp == const_true_rtx)
6105 emit_jump (if_false_label);
6106 else if (comp != const0_rtx)
6107 do_jump_for_compare (comp, if_false_label, 0);
6108 }
6109
6110 if (if_true_label)
6111 emit_jump (if_true_label);
6112 if (drop_through_label)
6113 emit_label (drop_through_label);
6114}
6115
6116/* Given a comparison expression in rtl form, output conditional branches to
6117 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6118
6119static void
6120do_jump_for_compare (comparison, if_false_label, if_true_label)
6121 rtx comparison, if_false_label, if_true_label;
6122{
6123 if (if_true_label)
6124 {
6125 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6126 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6127 else
6128 abort ();
6129
6130 if (if_false_label)
6131 emit_jump (if_false_label);
6132 }
6133 else if (if_false_label)
6134 {
6135 rtx insn;
6136 rtx prev = PREV_INSN (get_last_insn ());
6137 rtx branch = 0;
6138
6139 /* Output the branch with the opposite condition. Then try to invert
6140 what is generated. If more than one insn is a branch, or if the
6141 branch is not the last insn written, abort. If we can't invert
 6142      the branch, make a true label, redirect this jump to that,
6143 emit a jump to the false label and define the true label. */
6144
6145 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6146 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6147 else
6148 abort ();
6149
6150 /* Here we get the insn before what was just emitted.
6151 On some machines, emitting the branch can discard
6152 the previous compare insn and emit a replacement. */
6153 if (prev == 0)
6154 /* If there's only one preceding insn... */
6155 insn = get_insns ();
6156 else
6157 insn = NEXT_INSN (prev);
6158
6159 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6160 if (GET_CODE (insn) == JUMP_INSN)
6161 {
6162 if (branch)
6163 abort ();
6164 branch = insn;
6165 }
6166
6167 if (branch != get_last_insn ())
6168 abort ();
6169
6170 if (! invert_jump (branch, if_false_label))
6171 {
6172 if_true_label = gen_label_rtx ();
6173 redirect_jump (branch, if_true_label);
6174 emit_jump (if_false_label);
6175 emit_label (if_true_label);
6176 }
6177 }
6178}
6179\f
6180/* Generate code for a comparison expression EXP
6181 (including code to compute the values to be compared)
6182 and set (CC0) according to the result.
6183 SIGNED_CODE should be the rtx operation for this comparison for
6184 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6185
6186 We force a stack adjustment unless there are currently
6187 things pushed on the stack that aren't yet used. */
6188
6189static rtx
6190compare (exp, signed_code, unsigned_code)
6191 register tree exp;
6192 enum rtx_code signed_code, unsigned_code;
6193{
6194 register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6195 register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6196 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6197 register enum machine_mode mode = TYPE_MODE (type);
6198 int unsignedp = TREE_UNSIGNED (type);
6199 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6200
6201 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6202 ((mode == BLKmode)
6203 ? expr_size (TREE_OPERAND (exp, 0)) : 0),
6204 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6205}
6206
6207/* Like compare but expects the values to compare as two rtx's.
6208 The decision as to signed or unsigned comparison must be made by the caller.
6209
6210 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6211 compared.
6212
6213 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6214 size of MODE should be used. */
6215
6216rtx
6217compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6218 register rtx op0, op1;
6219 enum rtx_code code;
6220 int unsignedp;
6221 enum machine_mode mode;
6222 rtx size;
6223 int align;
6224{
6225 /* If one operand is constant, make it the second one. */
6226
6227 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6228 {
6229 rtx tem = op0;
6230 op0 = op1;
6231 op1 = tem;
6232 code = swap_condition (code);
6233 }
6234
6235 if (flag_force_mem)
6236 {
6237 op0 = force_not_mem (op0);
6238 op1 = force_not_mem (op1);
6239 }
6240
6241 do_pending_stack_adjust ();
6242
6243 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6244 return simplify_relational_operation (code, mode, op0, op1);
6245
6246 /* If this is a signed equality comparison, we can do it as an
6247 unsigned comparison since zero-extension is cheaper than sign
6248 extension and comparisons with zero are done as unsigned. If we
6249 are comparing against a constant, we must convert it to what it
6250 would look like unsigned. */
6251 if ((code == EQ || code == NE) && ! unsignedp
6252 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_INT)
6253 {
6254 if (GET_CODE (op1) == CONST_INT
6255 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6256 op1 = gen_rtx (CONST_INT, VOIDmode,
6257 INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6258 unsignedp = 1;
6259 }
6260
6261 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6262
6263 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
6264}
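/* For example, compare_from_rtx turns a signed QImode test `x == -1'
   into an unsigned compare against 0xff: the constant is masked with
   GET_MODE_MASK (QImode) and UNSIGNEDP is forced to 1 before the
   compare insn is emitted.  */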
6265\f
6266/* Generate code to calculate EXP using a store-flag instruction
6267 and return an rtx for the result. EXP is either a comparison
6268 or a TRUTH_NOT_EXPR whose operand is a comparison.
6269
6270 If TARGET is nonzero, store the result there if convenient.
6271
6272 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6273 cheap.
6274
6275 Return zero if there is no suitable set-flag instruction
6276 available on this machine.
6277
6278 Once expand_expr has been called on the arguments of the comparison,
6279 we are committed to doing the store flag, since it is not safe to
6280 re-evaluate the expression. We emit the store-flag insn by calling
6281 emit_store_flag, but only expand the arguments if we have a reason
6282 to believe that emit_store_flag will be successful. If we think that
6283 it will, but it isn't, we have to simulate the store-flag with a
6284 set/jump/set sequence. */
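/* For example, `r = (a < b)' can come out as a single store-flag insn
   leaving 0 or 1 in R; when that fails, the fallback below emits
       r = 1; if (a < b) goto L; r = 0; L:
   (with the two constants exchanged when INVERT is set).  */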
6285
6286static rtx
6287do_store_flag (exp, target, mode, only_cheap)
6288 tree exp;
6289 rtx target;
6290 enum machine_mode mode;
6291 int only_cheap;
6292{
6293 enum rtx_code code;
 6294   tree arg0, arg1, type;
 6295   tree tem;
6296 enum machine_mode operand_mode;
6297 int invert = 0;
6298 int unsignedp;
6299 rtx op0, op1;
6300 enum insn_code icode;
6301 rtx subtarget = target;
6302 rtx result, label, pattern, jump_pat;
6303
6304 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
6305 result at the end. We can't simply invert the test since it would
6306 have already been inverted if it were valid. This case occurs for
6307 some floating-point comparisons. */
6308
6309 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
6310 invert = 1, exp = TREE_OPERAND (exp, 0);
6311
6312 arg0 = TREE_OPERAND (exp, 0);
6313 arg1 = TREE_OPERAND (exp, 1);
6314 type = TREE_TYPE (arg0);
6315 operand_mode = TYPE_MODE (type);
6316 unsignedp = TREE_UNSIGNED (type);
6317
6318 /* We won't bother with BLKmode store-flag operations because it would mean
6319 passing a lot of information to emit_store_flag. */
6320 if (operand_mode == BLKmode)
6321 return 0;
6322
6323 while (TREE_CODE (arg0) == NON_LVALUE_EXPR)
6324 arg0 = TREE_OPERAND (arg0, 0);
6325
6326 while (TREE_CODE (arg1) == NON_LVALUE_EXPR)
6327 arg1 = TREE_OPERAND (arg1, 0);
6328
6329 /* Get the rtx comparison code to use. We know that EXP is a comparison
6330 operation of some type. Some comparisons against 1 and -1 can be
6331 converted to comparisons with zero. Do so here so that the tests
6332 below will be aware that we have a comparison with zero. These
6333 tests will not catch constants in the first operand, but constants
6334 are rarely passed as the first operand. */
6335
6336 switch (TREE_CODE (exp))
6337 {
6338 case EQ_EXPR:
6339 code = EQ;
6340 break;
6341 case NE_EXPR:
6342 code = NE;
6343 break;
6344 case LT_EXPR:
6345 if (integer_onep (arg1))
6346 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
6347 else
6348 code = unsignedp ? LTU : LT;
6349 break;
6350 case LE_EXPR:
6351 if (integer_all_onesp (arg1))
6352 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
6353 else
6354 code = unsignedp ? LEU : LE;
6355 break;
6356 case GT_EXPR:
6357 if (integer_all_onesp (arg1))
6358 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
6359 else
6360 code = unsignedp ? GTU : GT;
6361 break;
6362 case GE_EXPR:
6363 if (integer_onep (arg1))
6364 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
6365 else
6366 code = unsignedp ? GEU : GE;
6367 break;
6368 default:
6369 abort ();
6370 }
6371
6372 /* Put a constant second. */
6373 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
6374 {
6375 tem = arg0; arg0 = arg1; arg1 = tem;
6376 code = swap_condition (code);
6377 }
6378
6379 /* If this is an equality or inequality test of a single bit, we can
6380 do this by shifting the bit being tested to the low-order bit and
6381 masking the result with the constant 1. If the condition was EQ,
6382 we xor it with 1. This does not require an scc insn and is faster
6383 than an scc insn even if we have it. */
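  /* For example, `(x & 8) != 0' becomes `(x >> 3) & 1', and
     `(x & 8) == 0' becomes `((x >> 3) & 1) ^ 1', with no branch
     in either case.  */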
6384
6385 if ((code == NE || code == EQ)
6386 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6387 && integer_pow2p (TREE_OPERAND (arg0, 1))
6388 && TYPE_PRECISION (type) <= HOST_BITS_PER_INT)
6389 {
6390 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
6391 0, VOIDmode, 0)));
6392
6393 if (subtarget == 0 || GET_CODE (subtarget) != REG
6394 || GET_MODE (subtarget) != operand_mode
6395 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
6396 subtarget = 0;
6397
6398 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
6399
6400 if (bitnum != 0)
6401 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
6402 size_int (bitnum), target, 1);
6403
6404 if (GET_MODE (op0) != mode)
6405 op0 = convert_to_mode (mode, op0, 1);
6406
6407 if (bitnum != TYPE_PRECISION (type) - 1)
6408 op0 = expand_and (op0, const1_rtx, target);
6409
 6410       if ((code == EQ && ! invert) || (code == NE && invert))
6411 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
6412 OPTAB_LIB_WIDEN);
6413
6414 return op0;
6415 }
6416
6417 /* Now see if we are likely to be able to do this. Return if not. */
6418 if (! can_compare_p (operand_mode))
6419 return 0;
6420 icode = setcc_gen_code[(int) code];
6421 if (icode == CODE_FOR_nothing
6422 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
6423 {
6424 /* We can only do this if it is one of the special cases that
6425 can be handled without an scc insn. */
6426 if ((code == LT && integer_zerop (arg1))
6427 || (! only_cheap && code == GE && integer_zerop (arg1)))
6428 ;
6429 else if (BRANCH_COST >= 0
6430 && ! only_cheap && (code == NE || code == EQ)
6431 && TREE_CODE (type) != REAL_TYPE
6432 && ((abs_optab->handlers[(int) operand_mode].insn_code
6433 != CODE_FOR_nothing)
6434 || (ffs_optab->handlers[(int) operand_mode].insn_code
6435 != CODE_FOR_nothing)))
6436 ;
6437 else
6438 return 0;
6439 }
6440
6441 preexpand_calls (exp);
6442 if (subtarget == 0 || GET_CODE (subtarget) != REG
6443 || GET_MODE (subtarget) != operand_mode
6444 || ! safe_from_p (subtarget, arg1))
6445 subtarget = 0;
6446
6447 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
6448 op1 = expand_expr (arg1, 0, VOIDmode, 0);
6449
6450 if (target == 0)
6451 target = gen_reg_rtx (mode);
6452
6453 result = emit_store_flag (target, code, op0, op1, operand_mode,
6454 unsignedp, 1);
6455
6456 if (result)
6457 {
6458 if (invert)
6459 result = expand_binop (mode, xor_optab, result, const1_rtx,
6460 result, 0, OPTAB_LIB_WIDEN);
6461 return result;
6462 }
6463
6464 /* If this failed, we have to do this with set/compare/jump/set code. */
6465 if (target == 0 || GET_CODE (target) != REG
6466 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
6467 target = gen_reg_rtx (GET_MODE (target));
6468
 6469   emit_move_insn (target, invert ? const0_rtx : const1_rtx);
6470 result = compare_from_rtx (op0, op1, code, unsignedp, operand_mode, 0, 0);
6471 if (GET_CODE (result) == CONST_INT)
6472 return (((result == const0_rtx && ! invert)
6473 || (result != const0_rtx && invert))
6474 ? const0_rtx : const1_rtx);
6475
6476 label = gen_label_rtx ();
6477 if (bcc_gen_fctn[(int) code] == 0)
6478 abort ();
6479
6480 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
 6481   emit_move_insn (target, invert ? const1_rtx : const0_rtx);
6482 emit_label (label);
6483
6484 return target;
6485}
6486\f
6487/* Generate a tablejump instruction (used for switch statements). */
6488
6489#ifdef HAVE_tablejump
6490
6491/* INDEX is the value being switched on, with the lowest value
6492 in the table already subtracted.
 6493    MODE is its expected mode (needed if INDEX is constant).
6494 RANGE is the length of the jump table.
6495 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
6496
6497 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
6498 index value is out of range. */
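/* For example, a `switch (i)' whose cases run from 3 to 10 passes
   INDEX = i - 3 and RANGE = 7; the single unsigned comparison below
   then rejects both i < 3 and i > 10 at once.  */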
6499
6500void
 6501 do_tablejump (index, mode, range, table_label, default_label)
 6502      rtx index, range, table_label, default_label;
 6503      enum machine_mode mode;
6504{
6505 register rtx temp, vector;
6506
6507 /* Do an unsigned comparison (in the proper mode) between the index
6508 expression and the value which represents the length of the range.
6509 Since we just finished subtracting the lower bound of the range
6510 from the index expression, this comparison allows us to simultaneously
6511 check that the original index expression value is both greater than
6512 or equal to the minimum value of the range and less than or equal to
6513 the maximum value of the range. */
6514
6515 emit_cmp_insn (range, index, LTU, 0, mode, 0, 0);
 6516   emit_jump_insn (gen_bltu (default_label));
6517
6518 /* If index is in range, it must fit in Pmode.
6519 Convert to Pmode so we can index with it. */
6520 if (mode != Pmode)
6521 index = convert_to_mode (Pmode, index, 1);
6522
6523 /* If flag_force_addr were to affect this address
6524 it could interfere with the tricky assumptions made
6525 about addresses that contain label-refs,
6526 which may be valid only very near the tablejump itself. */
6527 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
6528 GET_MODE_SIZE, because this indicates how large insns are. The other
6529 uses should all be Pmode, because they are addresses. This code
6530 could fail if addresses and insns are not the same size. */
6531 index = memory_address_noforce
6532 (CASE_VECTOR_MODE,
6533 gen_rtx (PLUS, Pmode,
6534 gen_rtx (MULT, Pmode, index,
6535 gen_rtx (CONST_INT, VOIDmode,
6536 GET_MODE_SIZE (CASE_VECTOR_MODE))),
6537 gen_rtx (LABEL_REF, Pmode, table_label)));
6538 temp = gen_reg_rtx (CASE_VECTOR_MODE);
6539 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
6540 RTX_UNCHANGING_P (vector) = 1;
6541 convert_move (temp, vector, 0);
6542
6543 emit_jump_insn (gen_tablejump (temp, table_label));
6544
6545#ifndef CASE_VECTOR_PC_RELATIVE
6546 /* If we are generating PIC code or if the table is PC-relative, the
6547 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
6548 if (! flag_pic)
6549 emit_barrier ();
6550#endif
6551}
6552
6553#endif /* HAVE_tablejump */